diff --git a/.clang-format b/.clang-format new file mode 100644 index 000000000..37881bfc8 --- /dev/null +++ b/.clang-format @@ -0,0 +1,12 @@ +BasedOnStyle: Chromium +UseTab: Never +IndentWidth: 4 +TabWidth: 4 +AllowShortIfStatementsOnASingleLine: false +ColumnLimit: 0 +AccessModifierOffset: -4 +NamespaceIndentation: All +FixNamespaceComments: false +AlignAfterOpenBracket: true +AlignConsecutiveAssignments: true +IndentCaseLabels: true \ No newline at end of file diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..64a58a781 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,6 @@ +build*/ +test/ + +.cache/ +*.swp +models/ \ No newline at end of file diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 50e6a9227..4112ae9bb 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -4,17 +4,36 @@ on: workflow_dispatch: # allows manual triggering inputs: create_release: - description: 'Create new release' + description: "Create new release" required: true type: boolean push: branches: - master - ci - paths: ['.github/workflows/**', '**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu'] + paths: + [ + ".github/workflows/**", + "**/CMakeLists.txt", + "**/Makefile", + "**/*.h", + "**/*.hpp", + "**/*.c", + "**/*.cpp", + "**/*.cu", + ] pull_request: types: [opened, synchronize, reopened] - paths: ['**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu'] + paths: + [ + "**/CMakeLists.txt", + "**/Makefile", + "**/*.h", + "**/*.hpp", + "**/*.c", + "**/*.cpp", + "**/*.cu", + ] env: BRANCH_NAME: ${{ github.head_ref || github.ref_name }} @@ -30,7 +49,6 @@ jobs: with: submodules: recursive - - name: Dependencies id: depends run: | @@ -42,14 +60,37 @@ jobs: run: | mkdir build cd build - cmake .. + cmake .. -DGGML_AVX2=ON -DSD_BUILD_SHARED_LIBS=ON cmake --build . 
--config Release - #- name: Test - #id: cmake_test - #run: | - #cd build - #ctest --verbose --timeout 900 + - name: Get commit hash + id: commit + if: ${{ ( github.event_name == 'push' && github.ref == 'refs/heads/master' ) || github.event.inputs.create_release == 'true' }} + uses: pr-mpt/actions-commit-hash@v2 + + - name: Fetch system info + id: system-info + run: | + echo "CPU_ARCH=`uname -m`" >> "$GITHUB_OUTPUT" + echo "OS_NAME=`lsb_release -s -i`" >> "$GITHUB_OUTPUT" + echo "OS_VERSION=`lsb_release -s -r`" >> "$GITHUB_OUTPUT" + echo "OS_TYPE=`uname -s`" >> "$GITHUB_OUTPUT" + + - name: Pack artifacts + id: pack_artifacts + if: ${{ ( github.event_name == 'push' && github.ref == 'refs/heads/master' ) || github.event.inputs.create_release == 'true' }} + run: | + cp ggml/LICENSE ./build/bin/ggml.txt + cp LICENSE ./build/bin/stable-diffusion.cpp.txt + zip -j sd-${{ env.BRANCH_NAME }}-${{ steps.commit.outputs.short }}-bin-${{ steps.system-info.outputs.OS_TYPE }}-${{ steps.system-info.outputs.OS_NAME }}-${{ steps.system-info.outputs.OS_VERSION }}-${{ steps.system-info.outputs.CPU_ARCH }}.zip ./build/bin/* + + - name: Upload artifacts + if: ${{ ( github.event_name == 'push' && github.ref == 'refs/heads/master' ) || github.event.inputs.create_release == 'true' }} + uses: actions/upload-artifact@v4 + with: + name: sd-${{ env.BRANCH_NAME }}-${{ steps.commit.outputs.short }}-bin-${{ steps.system-info.outputs.OS_TYPE }}-${{ steps.system-info.outputs.OS_NAME }}-${{ steps.system-info.outputs.OS_VERSION }}-${{ steps.system-info.outputs.CPU_ARCH }}.zip + path: | + sd-${{ env.BRANCH_NAME }}-${{ steps.commit.outputs.short }}-bin-${{ steps.system-info.outputs.OS_TYPE }}-${{ steps.system-info.outputs.OS_NAME }}-${{ steps.system-info.outputs.OS_VERSION }}-${{ steps.system-info.outputs.CPU_ARCH }}.zip macOS-latest-cmake: runs-on: macos-latest @@ -63,9 +104,8 @@ jobs: - name: Dependencies id: depends - continue-on-error: true run: | - brew update + brew install zip - name: Build id: 
cmake_build @@ -73,30 +113,61 @@ jobs: sysctl -a mkdir build cd build - cmake .. + cmake .. -DGGML_AVX2=ON -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64" -DSD_BUILD_SHARED_LIBS=ON cmake --build . --config Release - #- name: Test - #id: cmake_test - #run: | - #cd build - #ctest --verbose --timeout 900 + - name: Get commit hash + id: commit + if: ${{ ( github.event_name == 'push' && github.ref == 'refs/heads/master' ) || github.event.inputs.create_release == 'true' }} + uses: pr-mpt/actions-commit-hash@v2 + + - name: Fetch system info + id: system-info + run: | + echo "CPU_ARCH=`uname -m`" >> "$GITHUB_OUTPUT" + echo "OS_NAME=`sw_vers -productName`" >> "$GITHUB_OUTPUT" + echo "OS_VERSION=`sw_vers -productVersion`" >> "$GITHUB_OUTPUT" + echo "OS_TYPE=`uname -s`" >> "$GITHUB_OUTPUT" + + - name: Pack artifacts + id: pack_artifacts + if: ${{ ( github.event_name == 'push' && github.ref == 'refs/heads/master' ) || github.event.inputs.create_release == 'true' }} + run: | + cp ggml/LICENSE ./build/bin/ggml.txt + cp LICENSE ./build/bin/stable-diffusion.cpp.txt + zip -j sd-${{ env.BRANCH_NAME }}-${{ steps.commit.outputs.short }}-bin-${{ steps.system-info.outputs.OS_TYPE }}-${{ steps.system-info.outputs.OS_NAME }}-${{ steps.system-info.outputs.OS_VERSION }}-${{ steps.system-info.outputs.CPU_ARCH }}.zip ./build/bin/* + + - name: Upload artifacts + if: ${{ ( github.event_name == 'push' && github.ref == 'refs/heads/master' ) || github.event.inputs.create_release == 'true' }} + uses: actions/upload-artifact@v4 + with: + name: sd-${{ env.BRANCH_NAME }}-${{ steps.commit.outputs.short }}-bin-${{ steps.system-info.outputs.OS_TYPE }}-${{ steps.system-info.outputs.OS_NAME }}-${{ steps.system-info.outputs.OS_VERSION }}-${{ steps.system-info.outputs.CPU_ARCH }}.zip + path: | + sd-${{ env.BRANCH_NAME }}-${{ steps.commit.outputs.short }}-bin-${{ steps.system-info.outputs.OS_TYPE }}-${{ steps.system-info.outputs.OS_NAME }}-${{ steps.system-info.outputs.OS_VERSION }}-${{ 
steps.system-info.outputs.CPU_ARCH }}.zip windows-latest-cmake: - runs-on: windows-latest + runs-on: windows-2025 + + env: + VULKAN_VERSION: 1.3.261.1 strategy: matrix: include: - - build: 'noavx' - defines: '-DGGML_AVX=OFF -DGGML_AVX2=OFF -DGGML_FMA=OFF' - - build: 'avx2' - defines: '-DGGML_AVX2=ON' - - build: 'avx' - defines: '-DGGML_AVX2=OFF' - - build: 'avx512' - defines: '-DGGML_AVX512=ON' - + - build: "noavx" + defines: "-DGGML_NATIVE=OFF -DGGML_AVX=OFF -DGGML_AVX2=OFF -DGGML_FMA=OFF -DSD_BUILD_SHARED_LIBS=ON" + - build: "avx2" + defines: "-DGGML_NATIVE=OFF -DGGML_AVX2=ON -DSD_BUILD_SHARED_LIBS=ON" + - build: "avx" + defines: "-DGGML_NATIVE=OFF -DGGML_AVX=ON -DGGML_AVX2=OFF -DSD_BUILD_SHARED_LIBS=ON" + - build: "avx512" + defines: "-DGGML_NATIVE=OFF -DGGML_AVX512=ON -DGGML_AVX=ON -DGGML_AVX2=ON -DSD_BUILD_SHARED_LIBS=ON" + - build: "cuda12" + defines: "-DSD_CUDA=ON -DSD_BUILD_SHARED_LIBS=ON -DCMAKE_CUDA_ARCHITECTURES=90;89;80;75" + # - build: "rocm5.5" + # defines: '-G Ninja -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ -DSD_HIPBLAS=ON -DCMAKE_BUILD_TYPE=Release -DAMDGPU_TARGETS="gfx1100;gfx1102;gfx1030" -DSD_BUILD_SHARED_LIBS=ON' + - build: 'vulkan' + defines: "-DSD_VULKAN=ON -DSD_BUILD_SHARED_LIBS=ON" steps: - name: Clone id: checkout @@ -104,6 +175,37 @@ jobs: with: submodules: recursive + - name: Install cuda-toolkit + id: cuda-toolkit + if: ${{ matrix.build == 'cuda12' }} + uses: Jimver/cuda-toolkit@v0.2.19 + with: + cuda: "12.6.2" + method: "network" + sub-packages: '["nvcc", "cudart", "cublas", "cublas_dev", "thrust", "visual_studio_integration"]' + + - name: Install rocm-toolkit + id: rocm-toolkit + if: ${{ matrix.build == 'rocm5.5' }} + uses: Cyberhan123/rocm-toolkit@v0.1.0 + with: + rocm: "5.5.0" + + - name: Install Ninja + id: install-ninja + if: ${{ matrix.build == 'rocm5.5' }} + uses: urkle/action-get-ninja@v1 + with: + version: 1.11.1 + - name: Install Vulkan SDK + id: get_vulkan + if: ${{ matrix.build == 'vulkan' }} + run: | + curl.exe 
-o $env:RUNNER_TEMP/VulkanSDK-Installer.exe -L "https://sdk.lunarg.com/sdk/download/${env:VULKAN_VERSION}/windows/VulkanSDK-${env:VULKAN_VERSION}-Installer.exe" + & "$env:RUNNER_TEMP\VulkanSDK-Installer.exe" --accept-licenses --default-answer --confirm-command install + Add-Content $env:GITHUB_ENV "VULKAN_SDK=C:\VulkanSDK\${env:VULKAN_VERSION}" + Add-Content $env:GITHUB_PATH "C:\VulkanSDK\${env:VULKAN_VERSION}\bin" + - name: Build id: cmake_build run: | @@ -125,12 +227,6 @@ jobs: & $cl /O2 /GS- /kernel avx512f.c /link /nodefaultlib /entry:main .\avx512f.exe && echo "AVX512F: YES" && ( echo HAS_AVX512F=1 >> $env:GITHUB_ENV ) || echo "AVX512F: NO" - #- name: Test - #id: cmake_test - #run: | - #cd build - #ctest -C Release --verbose --timeout 900 - - name: Get commit hash id: commit if: ${{ ( github.event_name == 'push' && github.ref == 'refs/heads/master' ) || github.event.inputs.create_release == 'true' }} @@ -140,14 +236,44 @@ jobs: id: pack_artifacts if: ${{ ( github.event_name == 'push' && github.ref == 'refs/heads/master' ) || github.event.inputs.create_release == 'true' }} run: | - Copy-Item ggml/LICENSE .\build\bin\Release\ggml.txt - Copy-Item LICENSE .\build\bin\Release\stable-diffusion.cpp.txt - 7z a sd-${{ env.BRANCH_NAME }}-${{ steps.commit.outputs.short }}-bin-win-${{ matrix.build }}-x64.zip .\build\bin\Release\* + $filePath = ".\build\bin\Release\*" + if (Test-Path $filePath) { + echo "Exists at path $filePath" + Copy-Item ggml/LICENSE .\build\bin\Release\ggml.txt + Copy-Item LICENSE .\build\bin\Release\stable-diffusion.cpp.txt + } elseif (Test-Path ".\build\bin\stable-diffusion.dll") { + $filePath = ".\build\bin\*" + echo "Exists at path $filePath" + Copy-Item ggml/LICENSE .\build\bin\ggml.txt + Copy-Item LICENSE .\build\bin\stable-diffusion.cpp.txt + } else { + ls .\build\bin + throw "Can't find stable-diffusion.dll" + } + 7z a sd-${{ env.BRANCH_NAME }}-${{ steps.commit.outputs.short }}-bin-win-${{ matrix.build }}-x64.zip $filePath + + - name: Copy and 
pack Cuda runtime + id: pack_cuda_runtime + if: ${{ ( github.event_name == 'push' && github.ref == 'refs/heads/master' && matrix.build == 'cuda12' ) || github.event.inputs.create_release == 'true' }} + run: | + echo "Cuda install location: ${{steps.cuda-toolkit.outputs.CUDA_PATH}}" + $dst='.\build\bin\cudart\' + robocopy "${{steps.cuda-toolkit.outputs.CUDA_PATH}}\bin" $dst cudart64_*.dll cublas64_*.dll cublasLt64_*.dll + 7z a cudart-sd-bin-win-cu12-x64.zip $dst\* + + - name: Upload Cuda runtime + if: ${{ ( github.event_name == 'push' && github.ref == 'refs/heads/master' && matrix.build == 'cuda12' ) || github.event.inputs.create_release == 'true' }} + uses: actions/upload-artifact@v4 + with: + name: sd-cudart-sd-bin-win-cu12-x64.zip + path: | + cudart-sd-bin-win-cu12-x64.zip - name: Upload artifacts if: ${{ ( github.event_name == 'push' && github.ref == 'refs/heads/master' ) || github.event.inputs.create_release == 'true' }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: + name: sd-${{ env.BRANCH_NAME }}-${{ steps.commit.outputs.short }}-bin-win-${{ matrix.build }}-x64.zip path: | sd-${{ env.BRANCH_NAME }}-${{ steps.commit.outputs.short }}-bin-win-${{ matrix.build }}-x64.zip @@ -164,7 +290,11 @@ jobs: steps: - name: Download artifacts id: download-artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 + with: + path: ./artifact + pattern: sd-* + merge-multiple: true - name: Get commit hash id: commit diff --git a/.gitignore b/.gitignore index 59a8a2cab..38fe570df 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,13 @@ build*/ test/ - +.vscode/ .cache/ *.swp +.vscode/ +*.bat +*.bin +*.exe +*.gguf +output*.png +models* +*.log \ No newline at end of file diff --git a/.gitmodules b/.gitmodules index cc639feee..d9d943713 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,3 @@ [submodule "ggml"] - path = ggml - url = https://github.com/leejet/ggml.git + path = ggml + url = https://github.com/ggerganov/ggml.git 
diff --git a/CMakeLists.txt b/CMakeLists.txt index 6f1930775..06de0d58b 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -24,17 +24,113 @@ endif() # general #option(SD_BUILD_TESTS "sd: build tests" ${SD_STANDALONE}) option(SD_BUILD_EXAMPLES "sd: build examples" ${SD_STANDALONE}) +option(SD_CUDA "sd: cuda backend" OFF) +option(SD_HIPBLAS "sd: rocm backend" OFF) +option(SD_METAL "sd: metal backend" OFF) +option(SD_VULKAN "sd: vulkan backend" OFF) +option(SD_OPENCL "sd: opencl backend" OFF) +option(SD_SYCL "sd: sycl backend" OFF) +option(SD_MUSA "sd: musa backend" OFF) +option(SD_FAST_SOFTMAX "sd: x1.5 faster softmax, indeterministic (sometimes, same seed don't generate same image), cuda only" OFF) +option(SD_BUILD_SHARED_LIBS "sd: build shared libs" OFF) #option(SD_BUILD_SERVER "sd: build server example" ON) +if(SD_CUDA) + message("-- Use CUDA as backend stable-diffusion") + set(GGML_CUDA ON) + add_definitions(-DSD_USE_CUDA) +endif() -# deps -add_subdirectory(ggml) +if(SD_METAL) + message("-- Use Metal as backend stable-diffusion") + set(GGML_METAL ON) + add_definitions(-DSD_USE_METAL) +endif() + +if (SD_VULKAN) + message("-- Use Vulkan as backend stable-diffusion") + set(GGML_VULKAN ON) + add_definitions(-DSD_USE_VULKAN) +endif () + +if (SD_OPENCL) + message("-- Use OpenCL as backend stable-diffusion") + set(GGML_OPENCL ON) + add_definitions(-DSD_USE_OPENCL) +endif () + +if (SD_HIPBLAS) + message("-- Use HIPBLAS as backend stable-diffusion") + set(GGML_HIP ON) + add_definitions(-DSD_USE_CUDA) + if(SD_FAST_SOFTMAX) + set(GGML_CUDA_FAST_SOFTMAX ON) + endif() +endif () + +if(SD_MUSA) + message("-- Use MUSA as backend stable-diffusion") + set(GGML_MUSA ON) + add_definitions(-DSD_USE_CUDA) + if(SD_FAST_SOFTMAX) + set(GGML_CUDA_FAST_SOFTMAX ON) + endif() +endif() set(SD_LIB stable-diffusion) -add_library(${SD_LIB} stable-diffusion.h stable-diffusion.cpp) -target_link_libraries(${SD_LIB} PUBLIC ggml) -target_include_directories(${SD_LIB} PUBLIC .) 
+file(GLOB SD_LIB_SOURCES + "*.h" + "*.cpp" + "*.hpp" +) + +# we can get only one share lib +if(SD_BUILD_SHARED_LIBS) + message("-- Build shared library") + message(${SD_LIB_SOURCES}) + set(BUILD_SHARED_LIBS OFF) + add_library(${SD_LIB} SHARED ${SD_LIB_SOURCES}) + add_definitions(-DSD_BUILD_SHARED_LIB) + target_compile_definitions(${SD_LIB} PRIVATE -DSD_BUILD_DLL) + set(CMAKE_POSITION_INDEPENDENT_CODE ON) +else() + message("-- Build static library") + set(BUILD_SHARED_LIBS OFF) + add_library(${SD_LIB} STATIC ${SD_LIB_SOURCES}) +endif() + +if(SD_SYCL) + message("-- Use SYCL as backend stable-diffusion") + set(GGML_SYCL ON) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-narrowing -fsycl") + add_definitions(-DSD_USE_SYCL) + # disable fast-math on host, see: + # https://www.intel.com/content/www/us/en/docs/cpp-compiler/developer-guide-reference/2021-10/fp-model-fp.html + if (WIN32) + set(SYCL_COMPILE_OPTIONS /fp:precise) + else() + set(SYCL_COMPILE_OPTIONS -fp-model=precise) + endif() + message("-- Turn off fast-math for host in SYCL backend") + target_compile_options(${SD_LIB} PRIVATE ${SYCL_COMPILE_OPTIONS}) +endif() + +set(CMAKE_POLICY_DEFAULT_CMP0077 NEW) + +# see https://github.com/ggerganov/ggml/pull/682 +add_definitions(-DGGML_MAX_NAME=128) + +# deps +# Only add ggml if it hasn't been added yet +if (NOT TARGET ggml) + add_subdirectory(ggml) +endif() + +add_subdirectory(thirdparty) + +target_link_libraries(${SD_LIB} PUBLIC ggml zip) +target_include_directories(${SD_LIB} PUBLIC . thirdparty) target_compile_features(${SD_LIB} PUBLIC cxx_std_11) diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..bd9a378f0 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,17 @@ +ARG UBUNTU_VERSION=22.04 + +FROM ubuntu:$UBUNTU_VERSION as build + +RUN apt-get update && apt-get install -y build-essential git cmake + +WORKDIR /sd.cpp + +COPY . . + +RUN mkdir build && cd build && cmake .. && cmake --build . 
--config Release + +FROM ubuntu:$UBUNTU_VERSION as runtime + +COPY --from=build /sd.cpp/build/bin/sd /sd + +ENTRYPOINT [ "/sd" ] \ No newline at end of file diff --git a/Dockerfile.musa b/Dockerfile.musa new file mode 100644 index 000000000..c7f5f2e83 --- /dev/null +++ b/Dockerfile.musa @@ -0,0 +1,22 @@ +ARG MUSA_VERSION=rc3.1.1 + +FROM mthreads/musa:${MUSA_VERSION}-devel-ubuntu22.04 as build + +RUN apt-get update && apt-get install -y ccache cmake git + +WORKDIR /sd.cpp + +COPY . . + +RUN mkdir build && cd build && \ + cmake .. -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ \ + -DCMAKE_C_FLAGS="${CMAKE_C_FLAGS} -fopenmp -I/usr/lib/llvm-14/lib/clang/14.0.0/include -L/usr/lib/llvm-14/lib" \ + -DCMAKE_CXX_FLAGS="${CMAKE_CXX_FLAGS} -fopenmp -I/usr/lib/llvm-14/lib/clang/14.0.0/include -L/usr/lib/llvm-14/lib" \ + -DSD_MUSA=ON -DCMAKE_BUILD_TYPE=Release && \ + cmake --build . --config Release + +FROM mthreads/musa:${MUSA_VERSION}-runtime-ubuntu22.04 as runtime + +COPY --from=build /sd.cpp/build/bin/sd /sd + +ENTRYPOINT [ "/sd" ] \ No newline at end of file diff --git a/README.md b/README.md index 6a673b86f..4720dc29c 100644 --- a/README.md +++ b/README.md @@ -1,42 +1,70 @@

- +

# stable-diffusion.cpp -Inference of [Stable Diffusion](https://github.com/CompVis/stable-diffusion) in pure C/C++ +Inference of Stable Diffusion and Flux in pure C/C++ ## Features - Plain C/C++ implementation based on [ggml](https://github.com/ggerganov/ggml), working in the same way as [llama.cpp](https://github.com/ggerganov/llama.cpp) +- Super lightweight and without external dependencies +- SD1.x, SD2.x, SDXL and [SD3/SD3.5](./docs/sd3.md) support + - !!!The VAE in SDXL encounters NaN issues under FP16, but unfortunately, the ggml_conv_2d only operates under FP16. Hence, a parameter is needed to specify the VAE that has fixed the FP16 NaN issue. You can find it here: [SDXL VAE FP16 Fix](https://huggingface.co/madebyollin/sdxl-vae-fp16-fix/blob/main/sdxl_vae.safetensors). +- [Flux-dev/Flux-schnell Support](./docs/flux.md) +- [FLUX.1-Kontext-dev](./docs/kontext.md) +- [Chroma](./docs/chroma.md) +- [SD-Turbo](https://huggingface.co/stabilityai/sd-turbo) and [SDXL-Turbo](https://huggingface.co/stabilityai/sdxl-turbo) support +- [PhotoMaker](https://github.com/TencentARC/PhotoMaker) support. - 16-bit, 32-bit float support -- 4-bit, 5-bit and 8-bit integer quantization support +- 2-bit, 3-bit, 4-bit, 5-bit and 8-bit integer quantization support - Accelerated memory-efficient CPU inference - - Only requires ~2.3GB when using txt2img with fp16 precision to generate a 512x512 image + - Only requires ~2.3GB when using txt2img with fp16 precision to generate a 512x512 image, enabling Flash Attention just requires ~1.8GB. - AVX, AVX2 and AVX512 support for x86 architectures +- Full CUDA, Metal, Vulkan, OpenCL and SYCL backend for GPU acceleration. +- Can load ckpt, safetensors and diffusers models/checkpoints. Standalone VAEs models + - No need to convert to `.ggml` or `.gguf` anymore! 
+- Flash Attention for memory usage optimization - Original `txt2img` and `img2img` mode - Negative prompt - [stable-diffusion-webui](https://github.com/AUTOMATIC1111/stable-diffusion-webui) style tokenizer (not all the features, only token weighting for now) +- LoRA support, same as [stable-diffusion-webui](https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Features#lora) +- Latent Consistency Models support (LCM/LCM-LoRA) +- Faster and memory efficient latent decoding with [TAESD](https://github.com/madebyollin/taesd) +- Upscale images generated with [ESRGAN](https://github.com/xinntao/Real-ESRGAN) +- VAE tiling processing for reduce memory usage +- Control Net support with SD 1.5 - Sampling method - `Euler A` + - `Euler` + - `Heun` + - `DPM2` + - `DPM++ 2M` + - [`DPM++ 2M v2`](https://github.com/AUTOMATIC1111/stable-diffusion-webui/discussions/8457) + - `DPM++ 2S a` + - [`LCM`](https://github.com/AUTOMATIC1111/stable-diffusion-webui/issues/13952) +- Cross-platform reproducibility (`--rng cuda`, consistent with the `stable-diffusion-webui GPU RNG`) +- Embedds generation parameters into png output as webui-compatible text string - Supported platforms - Linux - Mac OS - Windows + - Android (via Termux, [Local Diffusion](https://github.com/rmatif/Local-Diffusion)) ### TODO - [ ] More sampling methods -- [ ] GPU support - [ ] Make inference faster - The current implementation of ggml_conv_2d is slow and has high memory usage - [ ] Continuing to reduce memory usage (quantizing the weights of ggml_conv_2d) -- [ ] LoRA support -- [ ] k-quants support -- [ ] Cross-platform reproducibility (perhaps ensuring consistency with the original SD) +- [ ] Implement Inpainting support ## Usage +For most users, you can download the built executable program from the latest [release](https://github.com/leejet/stable-diffusion.cpp/releases/latest). +If the built product does not meet your requirements, you can choose to build it manually. 
+ ### Get the Code ``` @@ -53,38 +81,25 @@ git submodule init git submodule update ``` -### Convert weights +### Download weights - download original weights(.ckpt or .safetensors). For example - Stable Diffusion v1.4 from https://huggingface.co/CompVis/stable-diffusion-v-1-4-original - Stable Diffusion v1.5 from https://huggingface.co/runwayml/stable-diffusion-v1-5 + - Stable Diffuison v2.1 from https://huggingface.co/stabilityai/stable-diffusion-2-1 + - Stable Diffusion 3 2B from https://huggingface.co/stabilityai/stable-diffusion-3-medium ```shell curl -L -O https://huggingface.co/CompVis/stable-diffusion-v-1-4-original/resolve/main/sd-v1-4.ckpt # curl -L -O https://huggingface.co/runwayml/stable-diffusion-v1-5/resolve/main/v1-5-pruned-emaonly.safetensors + # curl -L -O https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-nonema-pruned.safetensors + # curl -L -O https://huggingface.co/stabilityai/stable-diffusion-3-medium/resolve/main/sd3_medium_incl_clips_t5xxlfp16.safetensors ``` -- convert weights to ggml model format - - ```shell - cd models - pip install -r requirements.txt - python convert.py [path to weights] --out_type [output precision] - # For example, python convert.py sd-v1-4.ckpt --out_type f16 - ``` - -### Quantization - -You can specify the output model format using the --out_type parameter - -- `f16` for 16-bit floating-point -- `f32` for 32-bit floating-point -- `q8_0` for 8-bit integer quantization -- `q5_0` or `q5_1` for 5-bit integer quantization -- `q4_0` or `q4_1` for 4-bit integer quantization - ### Build +#### Build from scratch + ```shell mkdir build cd build @@ -92,13 +107,174 @@ cmake .. cmake --build . --config Release ``` -#### Using OpenBLAS +##### Using OpenBLAS ``` cmake .. -DGGML_OPENBLAS=ON cmake --build . --config Release ``` +##### Using CUDA + +This provides BLAS acceleration using the CUDA cores of your Nvidia GPU. Make sure to have the CUDA toolkit installed. 
You can download it from your Linux distro's package manager (e.g. `apt install nvidia-cuda-toolkit`) or from here: [CUDA Toolkit](https://developer.nvidia.com/cuda-downloads). Recommended to have at least 4 GB of VRAM. + +``` +cmake .. -DSD_CUDA=ON +cmake --build . --config Release +``` + +##### Using HipBLAS +This provides BLAS acceleration using the ROCm cores of your AMD GPU. Make sure to have the ROCm toolkit installed. + +Windows User Refer to [docs/hipBLAS_on_Windows.md](docs%2FhipBLAS_on_Windows.md) for a comprehensive guide. + +``` +cmake .. -G "Ninja" -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ -DSD_HIPBLAS=ON -DCMAKE_BUILD_TYPE=Release -DAMDGPU_TARGETS=gfx1100 +cmake --build . --config Release +``` + +##### Using MUSA + +This provides BLAS acceleration using the MUSA cores of your Moore Threads GPU. Make sure to have the MUSA toolkit installed. + +```bash +cmake .. -DCMAKE_C_COMPILER=/usr/local/musa/bin/clang -DCMAKE_CXX_COMPILER=/usr/local/musa/bin/clang++ -DSD_MUSA=ON -DCMAKE_BUILD_TYPE=Release +cmake --build . --config Release +``` + +##### Using Metal + +Using Metal makes the computation run on the GPU. Currently, there are some issues with Metal when performing operations on very large matrices, making it highly inefficient at the moment. Performance improvements are expected in the near future. + +``` +cmake .. -DSD_METAL=ON +cmake --build . --config Release +``` + +##### Using Vulkan + +Install Vulkan SDK from https://www.lunarg.com/vulkan-sdk/. + +``` +cmake .. -DSD_VULKAN=ON +cmake --build . 
--config Release +``` + +##### Using OpenCL (for Adreno GPU) + +Currently, it supports only Adreno GPUs and is primarily optimized for Q4_0 type + +To build for Windows ARM please refers to [Windows 11 Arm64 +](https://github.com/ggml-org/llama.cpp/blob/master/docs/backend/OPENCL.md#windows-11-arm64) + +Building for Android: + + Android NDK: + Download and install the Android NDK from the [official Android developer site](https://developer.android.com/ndk/downloads). + +Setup OpenCL Dependencies for NDK: + +You need to provide OpenCL headers and the ICD loader library to your NDK sysroot. + +* OpenCL Headers: + ```bash + # In a temporary working directory + git clone https://github.com/KhronosGroup/OpenCL-Headers + cd OpenCL-Headers + # Replace with your actual NDK installation path + # e.g., cp -r CL /path/to/android-ndk-r26c/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/include + sudo cp -r CL /toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/include + cd .. + ``` + +* OpenCL ICD Loader: + ```bash + # In the same temporary working directory + git clone https://github.com/KhronosGroup/OpenCL-ICD-Loader + cd OpenCL-ICD-Loader + mkdir build_ndk && cd build_ndk + + # Replace in the CMAKE_TOOLCHAIN_FILE and OPENCL_ICD_LOADER_HEADERS_DIR + cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_TOOLCHAIN_FILE=/build/cmake/android.toolchain.cmake \ + -DOPENCL_ICD_LOADER_HEADERS_DIR=/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/include \ + -DANDROID_ABI=arm64-v8a \ + -DANDROID_PLATFORM=24 \ + -DANDROID_STL=c++_shared + + ninja + # Replace + # e.g., cp libOpenCL.so /path/to/android-ndk-r26c/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/lib/aarch64-linux-android + sudo cp libOpenCL.so /toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/lib/aarch64-linux-android + cd ../.. 
+ ``` + +Build `stable-diffusion.cpp` for Android with OpenCL: + +```bash +mkdir build-android && cd build-android + +# Replace with your actual NDK installation path +# e.g., -DCMAKE_TOOLCHAIN_FILE=/path/to/android-ndk-r26c/build/cmake/android.toolchain.cmake +cmake .. -G Ninja \ + -DCMAKE_TOOLCHAIN_FILE=/build/cmake/android.toolchain.cmake \ + -DANDROID_ABI=arm64-v8a \ + -DANDROID_PLATFORM=android-28 \ + -DGGML_OPENMP=OFF \ + -DSD_OPENCL=ON + +ninja +``` +*(Note: Don't forget to include `LD_LIBRARY_PATH=/vendor/lib64` in your command line before running the binary)* + +##### Using SYCL + +Using SYCL makes the computation run on the Intel GPU. Please make sure you have installed the related driver and [IntelĀ® oneAPI Base toolkit](https://www.intel.com/content/www/us/en/developer/tools/oneapi/base-toolkit.html) before start. More details and steps can refer to [llama.cpp SYCL backend](https://github.com/ggerganov/llama.cpp/blob/master/docs/backend/SYCL.md#linux). + +``` +# Export relevant ENV variables +source /opt/intel/oneapi/setvars.sh + +# Option 1: Use FP32 (recommended for better performance in most cases) +cmake .. -DSD_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx + +# Option 2: Use FP16 +cmake .. -DSD_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DGGML_SYCL_F16=ON + +cmake --build . --config Release +``` + +Example of text2img by using SYCL backend: + +- download `stable-diffusion` model weight, refer to [download-weight](#download-weights). 
+ +- run `./bin/sd -m ../models/sd3_medium_incl_clips_t5xxlfp16.safetensors --cfg-scale 5 --steps 30 --sampling-method euler -H 1024 -W 1024 --seed 42 -p "fantasy medieval village world inside a glass sphere , high detail, fantasy, realistic, light effect, hyper detail, volumetric lighting, cinematic, macro, depth of field, blur, red light and clouds from the back, highly detailed epic cinematic concept art cg render made in maya, blender and photoshop, octane render, excellent composition, dynamic dramatic cinematic lighting, aesthetic, very inspirational, world inside a glass sphere by james gurney by artgerm with james jean, joe fenton and tristan eaton by ross tran, fine details, 4k resolution"` + +

+ +

+ + + +##### Using Flash Attention + +Enabling flash attention for the diffusion model reduces memory usage by varying amounts of MB. +eg.: + - flux 768x768 ~600mb + - SD2 768x768 ~1400mb + +For most backends, it slows things down, but for cuda it generally speeds it up too. +At the moment, it is only supported for some models and some backends (like cpu, cuda/rocm, metal). + +Run by adding `--diffusion-fa` to the arguments and watch for: +``` +[INFO ] stable-diffusion.cpp:312 - Using flash attention in the diffusion model +``` +and the compute buffer shrink in the debug log: +``` +[DEBUG] ggml_extend.hpp:1004 - flux compute buffer size: 650.00 MB(VRAM) +``` + ### Run ``` @@ -106,29 +282,81 @@ usage: ./bin/sd [arguments] arguments: -h, --help show this help message and exit - -M, --mode [txt2img or img2img] generation mode (default: txt2img) - -t, --threads N number of threads to use during computation (default: -1). + -M, --mode [MODEL] run mode (txt2img or img2img or convert, default: txt2img) + -t, --threads N number of threads to use during computation (default: -1) If threads <= 0, then threads will be set to the number of CPU physical cores - -m, --model [MODEL] path to model + -m, --model [MODEL] path to full model + --diffusion-model path to the standalone diffusion model + --clip_l path to the clip-l text encoder + --clip_g path to the clip-g text encoder + --t5xxl path to the the t5xxl text encoder + --vae [VAE] path to vae + --taesd [TAESD_PATH] path to taesd. Using Tiny AutoEncoder for fast decoding (low quality) + --control-net [CONTROL_PATH] path to control net model + --embd-dir [EMBEDDING_PATH] path to embeddings + --stacked-id-embd-dir [DIR] path to PHOTOMAKER stacked id embeddings + --input-id-images-dir [DIR] path to PHOTOMAKER input id images dir + --normalize-input normalize PHOTOMAKER input id images + --upscale-model [ESRGAN_PATH] path to esrgan model. 
Upscale images after generate, just RealESRGAN_x4plus_anime_6B supported by now + --upscale-repeats Run the ESRGAN upscaler this many times (default 1) + --type [TYPE] weight type (examples: f32, f16, q4_0, q4_1, q5_0, q5_1, q8_0, q2_K, q3_K, q4_K) + If not specified, the default is the type of the weight file + --lora-model-dir [DIR] lora model directory -i, --init-img [IMAGE] path to the input image, required by img2img - -o, --output OUTPUT path to write result image to (default: .\output.png) + --mask [MASK] path to the mask image, required by img2img with mask + --control-image [IMAGE] path to image condition, control net + -r, --ref_image [PATH] reference image for Flux Kontext models (can be used multiple times) + -o, --output OUTPUT path to write result image to (default: ./output.png) -p, --prompt [PROMPT] the prompt to render -n, --negative-prompt PROMPT the negative prompt (default: "") --cfg-scale SCALE unconditional guidance scale: (default: 7.0) + --guidance SCALE guidance scale for img2img (default: 3.5) + --slg-scale SCALE skip layer guidance (SLG) scale, only for DiT models: (default: 0) + 0 means disabled, a value of 2.5 is nice for sd3.5 medium + --eta SCALE eta in DDIM, only for DDIM and TCD: (default: 0) + --skip-layers LAYERS Layers to skip for SLG steps: (default: [7,8,9]) + --skip-layer-start START SLG enabling point: (default: 0.01) + --skip-layer-end END SLG disabling point: (default: 0.2) + SLG will be enabled at step int([STEPS]*[START]) and disabled at int([STEPS]*[END]) --strength STRENGTH strength for noising/unnoising (default: 0.75) + --style-ratio STYLE-RATIO strength for keeping input identity (default: 20%) + --control-strength STRENGTH strength to apply Control Net (default: 0.9) 1.0 corresponds to full destruction of information in init image -H, --height H image height, in pixel space (default: 512) -W, --width W image width, in pixel space (default: 512) - --sample-method SAMPLE_METHOD sample method (default: "eular a") + 
--sampling-method {euler, euler_a, heun, dpm2, dpm++2s_a, dpm++2m, dpm++2mv2, ipndm, ipndm_v, lcm, ddim_trailing, tcd} + sampling method (default: "euler_a") --steps STEPS number of sample steps (default: 20) + --rng {std_default, cuda} RNG (default: cuda) -s SEED, --seed SEED RNG seed (default: 42, use random seed for < 0) + -b, --batch-count COUNT number of images to generate + --schedule {discrete, karras, exponential, ays, gits} Denoiser sigma schedule (default: discrete) + --clip-skip N ignore last layers of CLIP network; 1 ignores none, 2 ignores one layer (default: -1) + <= 0 represents unspecified, will be 1 for SD1.x, 2 for SD2.x + --vae-tiling process vae in tiles to reduce memory usage + --vae-on-cpu keep vae in cpu (for low vram) + --clip-on-cpu keep clip in cpu (for low vram) + --diffusion-fa use flash attention in the diffusion model (for low vram) + Might lower quality, since it implies converting k and v to f16. + This might crash if it is not supported by the backend. + --control-net-cpu keep controlnet in cpu (for low vram) + --canny apply canny preprocessor (edge detection) + --color colors the logging tags according to level + --chroma-disable-dit-mask disable dit mask for chroma + --chroma-enable-t5-mask enable t5 mask for chroma + --chroma-t5-mask-pad PAD_SIZE t5 mask pad size of chroma -v, --verbose print extra info ``` #### txt2img example -``` -./bin/sd -m ../models/sd-v1-4-ggml-model-f16.bin -p "a lovely cat" +```sh +./bin/sd -m ../models/sd-v1-4.ckpt -p "a lovely cat" +# ./bin/sd -m ../models/v1-5-pruned-emaonly.safetensors -p "a lovely cat" +# ./bin/sd -m ../models/sd_xl_base_1.0.safetensors --vae ../models/sdxl_vae-fp16-fix.safetensors -H 1024 -W 1024 -p "a lovely cat" -v +# ./bin/sd -m ../models/sd3_medium_incl_clips_t5xxlfp16.safetensors -H 1024 -W 1024 -p 'a lovely cat holding a sign says \"Stable Diffusion CPP\"' --cfg-scale 4.5 --sampling-method euler -v +# ./bin/sd --diffusion-model ../models/flux1-dev-q3_k.gguf --vae 
../models/ae.sft --clip_l ../models/clip_l.safetensors --t5xxl ../models/t5xxl_fp16.safetensors -p "a lovely cat holding a sign says 'flux.cpp'" --cfg-scale 1.0 --sampling-method euler -v +# ./bin/sd -m ..\models\sd3.5_large.safetensors --clip_l ..\models\clip_l.safetensors --clip_g ..\models\clip_g.safetensors --t5xxl ..\models\t5xxl_fp16.safetensors -H 1024 -W 1024 -p 'a lovely cat holding a sign says \"Stable diffusion 3.5 Large\"' --cfg-scale 4.5 --sampling-method euler -v ``` Using formats of different precisions will yield results of varying quality. @@ -143,24 +371,62 @@ Using formats of different precisions will yield results of varying quality. ``` -./bin/sd --mode img2img -m ../models/sd-v1-4-ggml-model-f16.bin -p "cat with blue eyes" -i ./output.png -o ./img2img_output.png --strength 0.4 +./bin/sd --mode img2img -m ../models/sd-v1-4.ckpt -p "cat with blue eyes" -i ./output.png -o ./img2img_output.png --strength 0.4 ```

-## Memory/Disk Requirements +## More Guides + +- [LoRA](./docs/lora.md) +- [LCM/LCM-LoRA](./docs/lcm.md) +- [Using PhotoMaker to personalize image generation](./docs/photo_maker.md) +- [Using ESRGAN to upscale results](./docs/esrgan.md) +- [Using TAESD to faster decoding](./docs/taesd.md) +- [Docker](./docs/docker.md) +- [Quantization and GGUF](./docs/quantization_and_gguf.md) + +## Bindings + +These projects wrap `stable-diffusion.cpp` for easier use in other languages/frameworks. + +* Golang (non-cgo): [seasonjs/stable-diffusion](https://github.com/seasonjs/stable-diffusion) +* Golang (cgo): [Binozo/GoStableDiffusion](https://github.com/Binozo/GoStableDiffusion) +* C#: [DarthAffe/StableDiffusion.NET](https://github.com/DarthAffe/StableDiffusion.NET) +* Python: [william-murray1204/stable-diffusion-cpp-python](https://github.com/william-murray1204/stable-diffusion-cpp-python) +* Rust: [newfla/diffusion-rs](https://github.com/newfla/diffusion-rs) +* Flutter/Dart: [rmatif/Local-Diffusion](https://github.com/rmatif/Local-Diffusion) + +## UIs + +These projects use `stable-diffusion.cpp` as a backend for their image generation. + +- [Jellybox](https://jellybox.com) +- [Stable Diffusion GUI](https://github.com/fszontagh/sd.cpp.gui.wx) +- [Stable Diffusion CLI-GUI](https://github.com/piallai/stable-diffusion.cpp) +- [Local Diffusion](https://github.com/rmatif/Local-Diffusion) + +## Contributors + +Thank you to all the people who have already contributed to stable-diffusion.cpp! 
+ +[![Contributors](https://contrib.rocks/image?repo=leejet/stable-diffusion.cpp)](https://github.com/leejet/stable-diffusion.cpp/graphs/contributors) -| precision | f32 | f16 |q8_0 |q5_0 |q5_1 |q4_0 |q4_1 | -| ---- | ---- |---- |---- |---- |---- |---- |---- | -| **Disk** | 2.7G | 2.0G | 1.7G | 1.6G | 1.6G | 1.5G | 1.5G | -| **Memory**(txt2img - 512 x 512) | ~2.8G | ~2.3G | ~2.1G | ~2.0G | ~2.0G | ~2.0G | ~2.0G | +## Star History +[![Star History Chart](https://api.star-history.com/svg?repos=leejet/stable-diffusion.cpp&type=Date)](https://star-history.com/#leejet/stable-diffusion.cpp&Date) ## References - [ggml](https://github.com/ggerganov/ggml) - [stable-diffusion](https://github.com/CompVis/stable-diffusion) +- [sd3-ref](https://github.com/Stability-AI/sd3-ref) +- [stable-diffusion-stability-ai](https://github.com/Stability-AI/stablediffusion) - [stable-diffusion-webui](https://github.com/AUTOMATIC1111/stable-diffusion-webui) +- [ComfyUI](https://github.com/comfyanonymous/ComfyUI) - [k-diffusion](https://github.com/crowsonkb/k-diffusion) +- [latent-consistency-model](https://github.com/luosiallen/latent-consistency-model) +- [generative-models](https://github.com/Stability-AI/generative-models/) +- [PhotoMaker](https://github.com/TencentARC/PhotoMaker) diff --git a/assets/cat_with_sd_cpp_20184.png b/assets/cat_with_sd_cpp_20184.png new file mode 100644 index 000000000..04a82bef8 Binary files /dev/null and b/assets/cat_with_sd_cpp_20184.png differ diff --git a/assets/cat_with_sd_cpp_42.png b/assets/cat_with_sd_cpp_42.png new file mode 100644 index 000000000..6368d5427 Binary files /dev/null and b/assets/cat_with_sd_cpp_42.png differ diff --git a/assets/control.png b/assets/control.png new file mode 100644 index 000000000..3ed95d093 Binary files /dev/null and b/assets/control.png differ diff --git a/assets/control_2.png b/assets/control_2.png new file mode 100644 index 000000000..9352dc0f4 Binary files /dev/null and b/assets/control_2.png differ diff --git 
a/assets/control_3.png b/assets/control_3.png new file mode 100644 index 000000000..4d114df0c Binary files /dev/null and b/assets/control_3.png differ diff --git a/assets/flux/chroma_v40.png b/assets/flux/chroma_v40.png new file mode 100644 index 000000000..4217009dc Binary files /dev/null and b/assets/flux/chroma_v40.png differ diff --git a/assets/flux/flux1-dev-q2_k.png b/assets/flux/flux1-dev-q2_k.png new file mode 100644 index 000000000..1aef6f8c6 Binary files /dev/null and b/assets/flux/flux1-dev-q2_k.png differ diff --git a/assets/flux/flux1-dev-q3_k.png b/assets/flux/flux1-dev-q3_k.png new file mode 100644 index 000000000..352bfc70c Binary files /dev/null and b/assets/flux/flux1-dev-q3_k.png differ diff --git a/assets/flux/flux1-dev-q4_0.png b/assets/flux/flux1-dev-q4_0.png new file mode 100644 index 000000000..1a5ee2b56 Binary files /dev/null and b/assets/flux/flux1-dev-q4_0.png differ diff --git a/assets/flux/flux1-dev-q4_k.png b/assets/flux/flux1-dev-q4_k.png new file mode 100644 index 000000000..9b3ebdd1a Binary files /dev/null and b/assets/flux/flux1-dev-q4_k.png differ diff --git a/assets/flux/flux1-dev-q8_0 with lora.png b/assets/flux/flux1-dev-q8_0 with lora.png new file mode 100644 index 000000000..fb05892aa Binary files /dev/null and b/assets/flux/flux1-dev-q8_0 with lora.png differ diff --git a/assets/flux/flux1-dev-q8_0.png b/assets/flux/flux1-dev-q8_0.png new file mode 100644 index 000000000..3f469d2da Binary files /dev/null and b/assets/flux/flux1-dev-q8_0.png differ diff --git a/assets/flux/flux1-schnell-q8_0.png b/assets/flux/flux1-schnell-q8_0.png new file mode 100644 index 000000000..4ba7dc401 Binary files /dev/null and b/assets/flux/flux1-schnell-q8_0.png differ diff --git a/assets/flux/kontext1_dev_output.png b/assets/flux/kontext1_dev_output.png new file mode 100644 index 000000000..4fa5e38dd Binary files /dev/null and b/assets/flux/kontext1_dev_output.png differ diff --git a/assets/photomaker_examples/lenna_woman/lenna.jpg 
b/assets/photomaker_examples/lenna_woman/lenna.jpg new file mode 100644 index 000000000..ca3ef19b5 Binary files /dev/null and b/assets/photomaker_examples/lenna_woman/lenna.jpg differ diff --git a/assets/photomaker_examples/newton_man/newton_0.jpg b/assets/photomaker_examples/newton_man/newton_0.jpg new file mode 100644 index 000000000..71ba285fd Binary files /dev/null and b/assets/photomaker_examples/newton_man/newton_0.jpg differ diff --git a/assets/photomaker_examples/newton_man/newton_1.jpg b/assets/photomaker_examples/newton_man/newton_1.jpg new file mode 100644 index 000000000..a59ed8c72 Binary files /dev/null and b/assets/photomaker_examples/newton_man/newton_1.jpg differ diff --git a/assets/photomaker_examples/newton_man/newton_2.png b/assets/photomaker_examples/newton_man/newton_2.png new file mode 100644 index 000000000..d8d4b9482 Binary files /dev/null and b/assets/photomaker_examples/newton_man/newton_2.png differ diff --git a/assets/photomaker_examples/newton_man/newton_3.jpg b/assets/photomaker_examples/newton_man/newton_3.jpg new file mode 100644 index 000000000..852867e8d Binary files /dev/null and b/assets/photomaker_examples/newton_man/newton_3.jpg differ diff --git a/assets/photomaker_examples/scarletthead_woman/scarlett_0.jpg b/assets/photomaker_examples/scarletthead_woman/scarlett_0.jpg new file mode 100644 index 000000000..ce9435a6f Binary files /dev/null and b/assets/photomaker_examples/scarletthead_woman/scarlett_0.jpg differ diff --git a/assets/photomaker_examples/scarletthead_woman/scarlett_1.jpg b/assets/photomaker_examples/scarletthead_woman/scarlett_1.jpg new file mode 100644 index 000000000..23269960f Binary files /dev/null and b/assets/photomaker_examples/scarletthead_woman/scarlett_1.jpg differ diff --git a/assets/photomaker_examples/scarletthead_woman/scarlett_2.jpg b/assets/photomaker_examples/scarletthead_woman/scarlett_2.jpg new file mode 100644 index 000000000..93ae735aa Binary files /dev/null and 
b/assets/photomaker_examples/scarletthead_woman/scarlett_2.jpg differ diff --git a/assets/photomaker_examples/scarletthead_woman/scarlett_3.jpg b/assets/photomaker_examples/scarletthead_woman/scarlett_3.jpg new file mode 100644 index 000000000..ccdca4be3 Binary files /dev/null and b/assets/photomaker_examples/scarletthead_woman/scarlett_3.jpg differ diff --git a/assets/photomaker_examples/yangmi_woman/yangmi_1.jpg b/assets/photomaker_examples/yangmi_woman/yangmi_1.jpg new file mode 100644 index 000000000..20fe66c87 Binary files /dev/null and b/assets/photomaker_examples/yangmi_woman/yangmi_1.jpg differ diff --git a/assets/photomaker_examples/yangmi_woman/yangmi_2.jpeg b/assets/photomaker_examples/yangmi_woman/yangmi_2.jpeg new file mode 100644 index 000000000..9ed47435b Binary files /dev/null and b/assets/photomaker_examples/yangmi_woman/yangmi_2.jpeg differ diff --git a/assets/photomaker_examples/yangmi_woman/yangmi_3.jpg b/assets/photomaker_examples/yangmi_woman/yangmi_3.jpg new file mode 100644 index 000000000..e840c1c6b Binary files /dev/null and b/assets/photomaker_examples/yangmi_woman/yangmi_3.jpg differ diff --git a/assets/photomaker_examples/yangmi_woman/yangmi_4.jpg b/assets/photomaker_examples/yangmi_woman/yangmi_4.jpg new file mode 100644 index 000000000..f43601123 Binary files /dev/null and b/assets/photomaker_examples/yangmi_woman/yangmi_4.jpg differ diff --git a/assets/photomaker_examples/yangmi_woman/yangmi_5.jpg b/assets/photomaker_examples/yangmi_woman/yangmi_5.jpg new file mode 100644 index 000000000..95e771406 Binary files /dev/null and b/assets/photomaker_examples/yangmi_woman/yangmi_5.jpg differ diff --git a/assets/photomaker_examples/yangmi_woman/yangmi_6.jpg b/assets/photomaker_examples/yangmi_woman/yangmi_6.jpg new file mode 100644 index 000000000..8c7c4428e Binary files /dev/null and b/assets/photomaker_examples/yangmi_woman/yangmi_6.jpg differ diff --git a/assets/sd3.5_large.png b/assets/sd3.5_large.png new file mode 100644 index 
000000000..b76b13225 Binary files /dev/null and b/assets/sd3.5_large.png differ diff --git a/assets/sycl_sd3_output.png b/assets/sycl_sd3_output.png new file mode 100644 index 000000000..9a902a37c Binary files /dev/null and b/assets/sycl_sd3_output.png differ diff --git a/assets/with_lcm.png b/assets/with_lcm.png new file mode 100644 index 000000000..70e2c700c Binary files /dev/null and b/assets/with_lcm.png differ diff --git a/assets/without_lcm.png b/assets/without_lcm.png new file mode 100644 index 000000000..145ab9419 Binary files /dev/null and b/assets/without_lcm.png differ diff --git a/clip.hpp b/clip.hpp new file mode 100644 index 000000000..d359f61cd --- /dev/null +++ b/clip.hpp @@ -0,0 +1,952 @@ +#ifndef __CLIP_HPP__ +#define __CLIP_HPP__ + +#include "ggml_extend.hpp" +#include "model.h" + +/*================================================== CLIPTokenizer ===================================================*/ + +std::pair, std::string> extract_and_remove_lora(std::string text) { + std::regex re("]+)>"); + std::smatch matches; + std::unordered_map filename2multiplier; + + while (std::regex_search(text, matches, re)) { + std::string filename = matches[1].str(); + float multiplier = std::stof(matches[2].str()); + + text = std::regex_replace(text, re, "", std::regex_constants::format_first_only); + + if (multiplier == 0.f) { + continue; + } + + if (filename2multiplier.find(filename) == filename2multiplier.end()) { + filename2multiplier[filename] = multiplier; + } else { + filename2multiplier[filename] += multiplier; + } + } + + return std::make_pair(filename2multiplier, text); +} + +std::vector> bytes_to_unicode() { + std::vector> byte_unicode_pairs; + std::set byte_set; + for (int b = static_cast('!'); b <= static_cast('~'); ++b) { + byte_set.insert(b); + byte_unicode_pairs.push_back(std::pair(b, unicode_value_to_utf32(b))); + } + for (int b = 161; b <= 172; ++b) { + byte_set.insert(b); + byte_unicode_pairs.push_back(std::pair(b, unicode_value_to_utf32(b))); 
+ } + for (int b = 174; b <= 255; ++b) { + byte_set.insert(b); + byte_unicode_pairs.push_back(std::pair(b, unicode_value_to_utf32(b))); + } + int n = 0; + for (int b = 0; b < 256; ++b) { + if (byte_set.find(b) == byte_set.end()) { + byte_unicode_pairs.push_back(std::pair(b, unicode_value_to_utf32(n + 256))); + ++n; + } + } + // LOG_DEBUG("byte_unicode_pairs %d", byte_unicode_pairs.size()); + return byte_unicode_pairs; +} + +// Ref: https://github.com/openai/CLIP/blob/main/clip/simple_tokenizer.py + +typedef std::function&)> on_new_token_cb_t; + +class CLIPTokenizer { +private: + std::map byte_encoder; + std::map byte_decoder; + std::map encoder; + std::map decoder; + std::map, int> bpe_ranks; + std::regex pat; + int encoder_len; + int bpe_len; + +public: + const std::string UNK_TOKEN = "<|endoftext|>"; + const std::string BOS_TOKEN = "<|startoftext|>"; + const std::string EOS_TOKEN = "<|endoftext|>"; + const std::string PAD_TOKEN = "<|endoftext|>"; + + const int UNK_TOKEN_ID = 49407; + const int BOS_TOKEN_ID = 49406; + const int EOS_TOKEN_ID = 49407; + const int PAD_TOKEN_ID = 49407; + +private: + static std::string strip(const std::string& str) { + std::string::size_type start = str.find_first_not_of(" \t\n\r\v\f"); + std::string::size_type end = str.find_last_not_of(" \t\n\r\v\f"); + + if (start == std::string::npos) { + // String contains only whitespace characters + return ""; + } + + return str.substr(start, end - start + 1); + } + + static std::string whitespace_clean(std::string text) { + text = std::regex_replace(text, std::regex(R"(\s+)"), " "); + text = strip(text); + return text; + } + + static std::set> get_pairs(const std::vector& subwords) { + std::set> pairs; + if (subwords.size() == 0) { + return pairs; + } + std::u32string prev_subword = subwords[0]; + for (int i = 1; i < subwords.size(); i++) { + std::u32string subword = subwords[i]; + std::pair pair(prev_subword, subword); + pairs.insert(pair); + prev_subword = subword; + } + return pairs; + } + 
+public: + CLIPTokenizer(int pad_token_id = 49407, const std::string& merges_utf8_str = "") + : PAD_TOKEN_ID(pad_token_id) { + if (merges_utf8_str.size() > 0) { + load_from_merges(merges_utf8_str); + } else { + load_from_merges(ModelLoader::load_merges()); + } + } + + void load_from_merges(const std::string& merges_utf8_str) { + auto byte_unicode_pairs = bytes_to_unicode(); + // printf("byte_unicode_pairs have %lu pairs \n", byte_unicode_pairs.size()); + byte_encoder = std::map(byte_unicode_pairs.begin(), byte_unicode_pairs.end()); + for (auto& pair : byte_unicode_pairs) { + byte_decoder[pair.second] = pair.first; + } + // for (auto & pair: byte_unicode_pairs) { + // std::cout << pair.first << ": " << pair.second << std::endl; + // } + std::vector merges; + size_t start = 0; + size_t pos; + std::u32string merges_utf32_str = utf8_to_utf32(merges_utf8_str); + while ((pos = merges_utf32_str.find('\n', start)) != std::string::npos) { + merges.push_back(merges_utf32_str.substr(start, pos - start)); + start = pos + 1; + } + // LOG_DEBUG("merges size %llu", merges.size()); + GGML_ASSERT(merges.size() == 48895); + merges = std::vector(merges.begin() + 1, merges.end()); + std::vector> merge_pairs; + for (const auto& merge : merges) { + size_t space_pos = merge.find(' '); + merge_pairs.emplace_back(merge.substr(0, space_pos), merge.substr(space_pos + 1)); + // LOG_DEBUG("%s", utf32_to_utf8(merge.substr(space_pos + 1)).c_str()); + // printf("%s :: %s | %s \n", utf32_to_utf8(merge).c_str(), utf32_to_utf8(merge.substr(0, space_pos)).c_str(), + // utf32_to_utf8(merge.substr(space_pos + 1)).c_str()); + } + std::vector vocab; + for (const auto& pair : byte_unicode_pairs) { + vocab.push_back(pair.second); + } + for (const auto& pair : byte_unicode_pairs) { + vocab.push_back(pair.second + utf8_to_utf32("")); + } + for (const auto& merge : merge_pairs) { + vocab.push_back(merge.first + merge.second); + } + vocab.push_back(utf8_to_utf32("<|startoftext|>")); + 
vocab.push_back(utf8_to_utf32("<|endoftext|>")); + LOG_DEBUG("vocab size: %llu", vocab.size()); + int i = 0; + for (const auto& token : vocab) { + encoder[token] = i; + decoder[i] = token; + i++; + } + encoder_len = i; + + auto it = encoder.find(utf8_to_utf32("img")); + if (it != encoder.end()) { + LOG_DEBUG(" trigger word img already in vocab"); + } else { + LOG_DEBUG(" trigger word img not in vocab yet"); + } + + int rank = 0; + for (const auto& merge : merge_pairs) { + bpe_ranks[merge] = rank++; + } + bpe_len = rank; + }; + + void add_token(const std::string& text) { + std::u32string token = utf8_to_utf32(text); + auto it = encoder.find(token); + if (it != encoder.end()) { + encoder[token] = encoder_len; + decoder[encoder_len] = token; + encoder_len++; + } + } + + std::u32string bpe(const std::u32string& token) { + std::vector word; + + for (int i = 0; i < token.size() - 1; i++) { + word.emplace_back(1, token[i]); + } + word.push_back(token.substr(token.size() - 1) + utf8_to_utf32("")); + + std::set> pairs = get_pairs(word); + + if (pairs.empty()) { + return token + utf8_to_utf32(""); + } + + while (true) { + auto min_pair_iter = std::min_element(pairs.begin(), + pairs.end(), + [&](const std::pair& a, + const std::pair& b) { + if (bpe_ranks.find(a) == bpe_ranks.end()) { + return false; + } else if (bpe_ranks.find(b) == bpe_ranks.end()) { + return true; + } + return bpe_ranks.at(a) < bpe_ranks.at(b); + }); + + const std::pair& bigram = *min_pair_iter; + + if (bpe_ranks.find(bigram) == bpe_ranks.end()) { + break; + } + + std::u32string first = bigram.first; + std::u32string second = bigram.second; + std::vector new_word; + int32_t i = 0; + + while (i < word.size()) { + auto it = std::find(word.begin() + i, word.end(), first); + if (it == word.end()) { + new_word.insert(new_word.end(), word.begin() + i, word.end()); + break; + } + new_word.insert(new_word.end(), word.begin() + i, it); + i = static_cast(std::distance(word.begin(), it)); + + if (word[i] == first && i 
< static_cast(word.size()) - 1 && word[i + 1] == second) { + new_word.push_back(first + second); + i += 2; + } else { + new_word.push_back(word[i]); + i += 1; + } + } + + word = new_word; + + if (word.size() == 1) { + break; + } + pairs = get_pairs(word); + } + + std::u32string result; + for (int i = 0; i < word.size(); i++) { + result += word[i]; + if (i != word.size() - 1) { + result += utf8_to_utf32(" "); + } + } + + return result; + } + + std::vector tokenize(std::string text, + on_new_token_cb_t on_new_token_cb, + size_t max_length = 0, + bool padding = false) { + std::vector tokens = encode(text, on_new_token_cb); + + tokens.insert(tokens.begin(), BOS_TOKEN_ID); + if (max_length > 0) { + if (tokens.size() > max_length - 1) { + tokens.resize(max_length - 1); + tokens.push_back(EOS_TOKEN_ID); + } else { + tokens.push_back(EOS_TOKEN_ID); + if (padding) { + tokens.insert(tokens.end(), max_length - tokens.size(), PAD_TOKEN_ID); + } + } + } + + return tokens; + } + + void pad_tokens(std::vector& tokens, + std::vector& weights, + size_t max_length = 0, + bool padding = false) { + if (max_length > 0 && padding) { + size_t n = std::ceil(tokens.size() * 1.0 / (max_length - 2)); + if (n == 0) { + n = 1; + } + size_t length = max_length * n; + LOG_DEBUG("token length: %llu", length); + std::vector new_tokens; + std::vector new_weights; + new_tokens.push_back(BOS_TOKEN_ID); + new_weights.push_back(1.0); + int token_idx = 0; + for (int i = 1; i < length; i++) { + if (token_idx >= tokens.size()) { + break; + } + if (i % max_length == 0) { + new_tokens.push_back(BOS_TOKEN_ID); + new_weights.push_back(1.0); + } else if (i % max_length == max_length - 1) { + new_tokens.push_back(EOS_TOKEN_ID); + new_weights.push_back(1.0); + } else { + new_tokens.push_back(tokens[token_idx]); + new_weights.push_back(weights[token_idx]); + token_idx++; + } + } + + new_tokens.push_back(EOS_TOKEN_ID); + new_weights.push_back(1.0); + tokens = new_tokens; + weights = new_weights; + + if (padding) { 
+ tokens.insert(tokens.end(), length - tokens.size(), PAD_TOKEN_ID); + weights.insert(weights.end(), length - weights.size(), 1.0); + } + } + } + + std::string clean_up_tokenization(std::string& text) { + std::regex pattern(R"( ,)"); + // Replace " ," with "," + std::string result = std::regex_replace(text, pattern, ","); + return result; + } + + std::string decode(const std::vector& tokens) { + std::string text = ""; + for (int t : tokens) { + if (t == 49406 || t == 49407) + continue; + std::u32string ts = decoder[t]; + // printf("%d, %s \n", t, utf32_to_utf8(ts).c_str()); + std::string s = utf32_to_utf8(ts); + if (s.length() >= 4) { + if (ends_with(s, "")) { + text += s.replace(s.length() - 4, s.length() - 1, "") + " "; + } else { + text += s; + } + } else { + text += " " + s; + } + } + // std::vector bytes; + // for (auto c : text){ + // bytes.push_back(byte_decoder[c]); + // } + + // std::string s((char *)bytes.data()); + // std::string s = ""; + text = clean_up_tokenization(text); + return trim(text); + } + + std::vector encode(std::string text, on_new_token_cb_t on_new_token_cb) { + std::string original_text = text; + std::vector bpe_tokens; + text = whitespace_clean(text); + std::transform(text.begin(), text.end(), text.begin(), [](unsigned char c) { return std::tolower(c); }); + + std::regex pat(R"(<\|startoftext\|>|<\|endoftext\|>|'s|'t|'re|'ve|'m|'ll|'d|[[:alpha:]]+|[[:digit:]]|[^[:space:][:alpha:][:digit:]]+)", + std::regex::icase); + + std::smatch matches; + std::string str = text; + std::vector token_strs; + while (std::regex_search(str, matches, pat)) { + bool skip = on_new_token_cb(str, bpe_tokens); + if (skip) { + continue; + } + for (auto& token : matches) { + std::string token_str = token.str(); + std::u32string utf32_token; + for (int i = 0; i < token_str.length(); i++) { + unsigned char b = token_str[i]; + utf32_token += byte_encoder[b]; + } + auto bpe_strs = bpe(utf32_token); + size_t start = 0; + size_t pos; + while ((pos = bpe_strs.find(' ', 
start)) != std::u32string::npos) { + auto bpe_str = bpe_strs.substr(start, pos - start); + bpe_tokens.push_back(encoder[bpe_str]); + token_strs.push_back(utf32_to_utf8(bpe_str)); + + start = pos + 1; + } + auto bpe_str = bpe_strs.substr(start, bpe_strs.size() - start); + bpe_tokens.push_back(encoder[bpe_str]); + token_strs.push_back(utf32_to_utf8(bpe_str)); + } + str = matches.suffix(); + } + std::stringstream ss; + ss << "["; + for (auto token : token_strs) { + ss << "\"" << token << "\", "; + } + ss << "]"; + // LOG_DEBUG("split prompt \"%s\" to tokens %s", original_text.c_str(), ss.str().c_str()); + // printf("split prompt \"%s\" to tokens %s \n", original_text.c_str(), ss.str().c_str()); + return bpe_tokens; + } +}; + +/*================================================ FrozenCLIPEmbedder ================================================*/ + +// Ref: https://github.com/huggingface/transformers/blob/main/src/transformers/models/clip/modeling_clip.py + +struct CLIPMLP : public GGMLBlock { +protected: + bool use_gelu; + +public: + CLIPMLP(int64_t d_model, int64_t intermediate_size) { + blocks["fc1"] = std::shared_ptr(new Linear(d_model, intermediate_size)); + blocks["fc2"] = std::shared_ptr(new Linear(intermediate_size, d_model)); + + if (d_model == 1024 || d_model == 1280) { // SD 2.x + use_gelu = true; + } else { // SD 1.x + use_gelu = false; + } + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [N, n_token, d_model] + auto fc1 = std::dynamic_pointer_cast(blocks["fc1"]); + auto fc2 = std::dynamic_pointer_cast(blocks["fc2"]); + + x = fc1->forward(ctx, x); + if (use_gelu) { + x = ggml_gelu_inplace(ctx, x); + } else { + x = ggml_gelu_quick_inplace(ctx, x); + } + x = fc2->forward(ctx, x); + return x; + } +}; + +struct CLIPLayer : public GGMLBlock { +protected: + int64_t d_model; // hidden_size/embed_dim + int64_t n_head; + int64_t intermediate_size; + +public: + CLIPLayer(int64_t d_model, + int64_t n_head, + int64_t 
intermediate_size) + : d_model(d_model), + n_head(n_head), + intermediate_size(intermediate_size) { + blocks["self_attn"] = std::shared_ptr(new MultiheadAttention(d_model, n_head, true, true)); + + blocks["layer_norm1"] = std::shared_ptr(new LayerNorm(d_model)); + blocks["layer_norm2"] = std::shared_ptr(new LayerNorm(d_model)); + + blocks["mlp"] = std::shared_ptr(new CLIPMLP(d_model, intermediate_size)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x, bool mask = true) { + // x: [N, n_token, d_model] + auto self_attn = std::dynamic_pointer_cast(blocks["self_attn"]); + auto layer_norm1 = std::dynamic_pointer_cast(blocks["layer_norm1"]); + auto layer_norm2 = std::dynamic_pointer_cast(blocks["layer_norm2"]); + auto mlp = std::dynamic_pointer_cast(blocks["mlp"]); + + x = ggml_add(ctx, x, self_attn->forward(ctx, layer_norm1->forward(ctx, x), mask)); + x = ggml_add(ctx, x, mlp->forward(ctx, layer_norm2->forward(ctx, x))); + return x; + } +}; + +struct CLIPEncoder : public GGMLBlock { +protected: + int64_t n_layer; + +public: + CLIPEncoder(int64_t n_layer, + int64_t d_model, + int64_t n_head, + int64_t intermediate_size) + : n_layer(n_layer) { + for (int i = 0; i < n_layer; i++) { + std::string name = "layers." + std::to_string(i); + blocks[name] = std::shared_ptr(new CLIPLayer(d_model, n_head, intermediate_size)); + } + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x, int clip_skip = -1, bool mask = true) { + // x: [N, n_token, d_model] + int layer_idx = n_layer - 1; + // LOG_DEBUG("clip_skip %d", clip_skip); + if (clip_skip > 0) { + layer_idx = n_layer - clip_skip; + } + + for (int i = 0; i < n_layer; i++) { + // LOG_DEBUG("layer %d", i); + if (i == layer_idx + 1) { + break; + } + std::string name = "layers." 
+ std::to_string(i); + auto layer = std::dynamic_pointer_cast(blocks[name]); + x = layer->forward(ctx, x, mask); // [N, n_token, d_model] + // LOG_DEBUG("layer %d", i); + } + return x; + } +}; + +class CLIPEmbeddings : public GGMLBlock { +protected: + int64_t embed_dim; + int64_t vocab_size; + int64_t num_positions; + + void init_params(struct ggml_context* ctx, std::map& tensor_types, const std::string prefix = "") { + enum ggml_type token_wtype = GGML_TYPE_F32; //(tensor_types.find(prefix + "token_embedding.weight") != tensor_types.end()) ? tensor_types[prefix + "token_embedding.weight"] : GGML_TYPE_F32; + enum ggml_type position_wtype = GGML_TYPE_F32; //(tensor_types.find(prefix + "position_embedding.weight") != tensor_types.end()) ? tensor_types[prefix + "position_embedding.weight"] : GGML_TYPE_F32; + + params["token_embedding.weight"] = ggml_new_tensor_2d(ctx, token_wtype, embed_dim, vocab_size); + params["position_embedding.weight"] = ggml_new_tensor_2d(ctx, position_wtype, embed_dim, num_positions); + } + +public: + CLIPEmbeddings(int64_t embed_dim, + int64_t vocab_size = 49408, + int64_t num_positions = 77) + : embed_dim(embed_dim), + vocab_size(vocab_size), + num_positions(num_positions) { + } + + struct ggml_tensor* get_token_embed_weight() { + return params["token_embedding.weight"]; + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* input_ids, + struct ggml_tensor* custom_embed_weight) { + // input_ids: [N, n_token] + auto token_embed_weight = params["token_embedding.weight"]; + auto position_embed_weight = params["position_embedding.weight"]; + + GGML_ASSERT(input_ids->ne[0] == position_embed_weight->ne[1]); + input_ids = ggml_reshape_3d(ctx, input_ids, input_ids->ne[0], 1, input_ids->ne[1]); + auto token_embedding = ggml_get_rows(ctx, custom_embed_weight != NULL ? 
custom_embed_weight : token_embed_weight, input_ids); + token_embedding = ggml_reshape_3d(ctx, token_embedding, token_embedding->ne[0], token_embedding->ne[1], token_embedding->ne[3]); + + // token_embedding + position_embedding + auto x = ggml_add(ctx, + token_embedding, + position_embed_weight); // [N, n_token, embed_dim] + return x; + } +}; + +class CLIPVisionEmbeddings : public GGMLBlock { +protected: + int64_t embed_dim; + int64_t num_channels; + int64_t patch_size; + int64_t image_size; + int64_t num_patches; + int64_t num_positions; + void init_params(struct ggml_context* ctx, std::map& tensor_types, const std::string prefix = "") { + enum ggml_type patch_wtype = GGML_TYPE_F16; // tensor_types.find(prefix + "patch_embedding.weight") != tensor_types.end() ? tensor_types[prefix + "patch_embedding.weight"] : GGML_TYPE_F16; + enum ggml_type class_wtype = GGML_TYPE_F32; // tensor_types.find(prefix + "class_embedding") != tensor_types.end() ? tensor_types[prefix + "class_embedding"] : GGML_TYPE_F32; + enum ggml_type position_wtype = GGML_TYPE_F32; // tensor_types.find(prefix + "position_embedding.weight") != tensor_types.end() ? 
tensor_types[prefix + "position_embedding.weight"] : GGML_TYPE_F32; + + params["patch_embedding.weight"] = ggml_new_tensor_4d(ctx, patch_wtype, patch_size, patch_size, num_channels, embed_dim); + params["class_embedding"] = ggml_new_tensor_1d(ctx, class_wtype, embed_dim); + params["position_embedding.weight"] = ggml_new_tensor_2d(ctx, position_wtype, embed_dim, num_positions); + } + +public: + CLIPVisionEmbeddings(int64_t embed_dim, + int64_t num_channels = 3, + int64_t patch_size = 14, + int64_t image_size = 224) + : embed_dim(embed_dim), + num_channels(num_channels), + patch_size(patch_size), + image_size(image_size) { + num_patches = (image_size / patch_size) * (image_size / patch_size); + num_positions = num_patches + 1; + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* pixel_values) { + // pixel_values: [N, num_channels, image_size, image_size] + // return: [N, num_positions, embed_dim] + GGML_ASSERT(pixel_values->ne[0] == image_size && pixel_values->ne[1] == image_size && pixel_values->ne[2] == num_channels); + + auto patch_embed_weight = params["patch_embedding.weight"]; + auto class_embed_weight = params["class_embedding"]; + auto position_embed_weight = params["position_embedding.weight"]; + + // concat(patch_embedding, class_embedding) + position_embedding + struct ggml_tensor* patch_embedding; + int64_t N = pixel_values->ne[3]; + patch_embedding = ggml_nn_conv_2d(ctx, pixel_values, patch_embed_weight, NULL, patch_size, patch_size); // [N, embed_dim, image_size // pacht_size, image_size // pacht_size] + patch_embedding = ggml_reshape_3d(ctx, patch_embedding, num_patches, embed_dim, N); // [N, embed_dim, num_patches] + patch_embedding = ggml_cont(ctx, ggml_permute(ctx, patch_embedding, 1, 0, 2, 3)); // [N, num_patches, embed_dim] + patch_embedding = ggml_reshape_4d(ctx, patch_embedding, 1, embed_dim, num_patches, N); // [N, num_patches, embed_dim, 1] + + struct ggml_tensor* class_embedding = ggml_new_tensor_2d(ctx, 
GGML_TYPE_F32, embed_dim, N); + class_embedding = ggml_repeat(ctx, class_embed_weight, class_embedding); // [N, embed_dim] + class_embedding = ggml_reshape_4d(ctx, class_embedding, 1, embed_dim, 1, N); // [N, 1, embed_dim, 1] + + struct ggml_tensor* x = ggml_concat(ctx, class_embedding, patch_embedding, 2); // [N, num_positions, embed_dim, 1] + x = ggml_reshape_3d(ctx, x, embed_dim, num_positions, N); // [N, num_positions, embed_dim] + x = ggml_add(ctx, x, position_embed_weight); + return x; // [N, num_positions, embed_dim] + } +}; + +// OPENAI_CLIP_VIT_L_14: https://huggingface.co/openai/clip-vit-large-patch14/blob/main/config.json +// OPEN_CLIP_VIT_H_14: https://huggingface.co/laion/CLIP-ViT-H-14-laion2B-s32B-b79K/blob/main/config.json +// OPEN_CLIP_VIT_BIGG_14: https://huggingface.co/laion/CLIP-ViT-bigG-14-laion2B-39B-b160k/blob/main/config.json (CLIPTextModelWithProjection) + +enum CLIPVersion { + OPENAI_CLIP_VIT_L_14, // SD 1.x and SDXL + OPEN_CLIP_VIT_H_14, // SD 2.x + OPEN_CLIP_VIT_BIGG_14, // SDXL +}; + +class CLIPTextModel : public GGMLBlock { +protected: + void init_params(struct ggml_context* ctx, std::map& tensor_types, const std::string prefix = "") { + if (version == OPEN_CLIP_VIT_BIGG_14) { + enum ggml_type wtype = GGML_TYPE_F32; // tensor_types.find(prefix + "text_projection") != tensor_types.end() ? 
tensor_types[prefix + "text_projection"] : GGML_TYPE_F32; + params["text_projection"] = ggml_new_tensor_2d(ctx, wtype, projection_dim, hidden_size); + } + } + +public: + CLIPVersion version = OPENAI_CLIP_VIT_L_14; + // network hparams + int32_t vocab_size = 49408; + int32_t n_token = 77; // max_position_embeddings + int32_t hidden_size = 768; + int32_t intermediate_size = 3072; + int32_t n_head = 12; + int32_t n_layer = 12; // num_hidden_layers + int32_t projection_dim = 1280; // only for OPEN_CLIP_VIT_BIGG_14 + int32_t clip_skip = -1; + bool with_final_ln = true; + + CLIPTextModel(CLIPVersion version = OPENAI_CLIP_VIT_L_14, + bool with_final_ln = true, + int clip_skip_value = -1) + : version(version), with_final_ln(with_final_ln) { + if (version == OPEN_CLIP_VIT_H_14) { + hidden_size = 1024; + intermediate_size = 4096; + n_head = 16; + n_layer = 24; + } else if (version == OPEN_CLIP_VIT_BIGG_14) { // CLIPTextModelWithProjection + hidden_size = 1280; + intermediate_size = 5120; + n_head = 20; + n_layer = 32; + } + set_clip_skip(clip_skip_value); + + blocks["embeddings"] = std::shared_ptr(new CLIPEmbeddings(hidden_size, vocab_size, n_token)); + blocks["encoder"] = std::shared_ptr(new CLIPEncoder(n_layer, hidden_size, n_head, intermediate_size)); + blocks["final_layer_norm"] = std::shared_ptr(new LayerNorm(hidden_size)); + } + + void set_clip_skip(int skip) { + if (skip <= 0) { + skip = -1; + } + clip_skip = skip; + } + + struct ggml_tensor* get_token_embed_weight() { + auto embeddings = std::dynamic_pointer_cast(blocks["embeddings"]); + return embeddings->get_token_embed_weight(); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* input_ids, + struct ggml_tensor* tkn_embeddings, + size_t max_token_idx = 0, + bool return_pooled = false) { + // input_ids: [N, n_token] + auto embeddings = std::dynamic_pointer_cast(blocks["embeddings"]); + auto encoder = std::dynamic_pointer_cast(blocks["encoder"]); + auto final_layer_norm = 
std::dynamic_pointer_cast(blocks["final_layer_norm"]); + + auto x = embeddings->forward(ctx, input_ids, tkn_embeddings); // [N, n_token, hidden_size] + x = encoder->forward(ctx, x, return_pooled ? -1 : clip_skip, true); + if (return_pooled || with_final_ln) { + x = final_layer_norm->forward(ctx, x); + } + + if (return_pooled) { + auto text_projection = params["text_projection"]; + ggml_tensor* pooled = ggml_view_1d(ctx, x, hidden_size, x->nb[1] * max_token_idx); + if (text_projection != NULL) { + pooled = ggml_nn_linear(ctx, pooled, text_projection, NULL); + } else { + LOG_DEBUG("Missing text_projection matrix, assuming identity..."); + } + return pooled; // [hidden_size, 1, 1] + } + + return x; // [N, n_token, hidden_size] + } +}; + +class CLIPVisionModel : public GGMLBlock { +public: + // network hparams + int32_t num_channels = 3; + int32_t patch_size = 14; + int32_t image_size = 224; + int32_t num_positions = 257; // (image_size / patch_size)^2 + 1 + int32_t hidden_size = 1024; + int32_t intermediate_size = 4096; + int32_t n_head = 16; + int32_t n_layer = 24; + +public: + CLIPVisionModel(CLIPVersion version = OPENAI_CLIP_VIT_L_14) { + if (version == OPEN_CLIP_VIT_H_14) { + hidden_size = 1280; + intermediate_size = 5120; + n_head = 16; + n_layer = 32; + } else if (version == OPEN_CLIP_VIT_BIGG_14) { + hidden_size = 1664; + intermediate_size = 8192; + n_head = 16; + n_layer = 48; + } + + blocks["embeddings"] = std::shared_ptr(new CLIPVisionEmbeddings(hidden_size, num_channels, patch_size, image_size)); + blocks["pre_layernorm"] = std::shared_ptr(new LayerNorm(hidden_size)); + blocks["encoder"] = std::shared_ptr(new CLIPEncoder(n_layer, hidden_size, n_head, intermediate_size)); + blocks["post_layernorm"] = std::shared_ptr(new LayerNorm(hidden_size)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* pixel_values, bool return_pooled = true) { + // pixel_values: [N, num_channels, image_size, image_size] + auto embeddings = 
std::dynamic_pointer_cast(blocks["embeddings"]); + auto pre_layernorm = std::dynamic_pointer_cast(blocks["pre_layernorm"]); + auto encoder = std::dynamic_pointer_cast(blocks["encoder"]); + auto post_layernorm = std::dynamic_pointer_cast(blocks["post_layernorm"]); + + auto x = embeddings->forward(ctx, pixel_values); // [N, num_positions, embed_dim] + x = pre_layernorm->forward(ctx, x); + x = encoder->forward(ctx, x, -1, false); + // print_ggml_tensor(x, true, "ClipVisionModel x: "); + auto last_hidden_state = x; + x = post_layernorm->forward(ctx, x); // [N, n_token, hidden_size] + + GGML_ASSERT(x->ne[3] == 1); + if (return_pooled) { + ggml_tensor* pooled = ggml_cont(ctx, ggml_view_2d(ctx, x, x->ne[0], x->ne[2], x->nb[2], 0)); + return pooled; // [N, hidden_size] + } else { + // return x; // [N, n_token, hidden_size] + return last_hidden_state; // [N, n_token, hidden_size] + } + } +}; + +class CLIPProjection : public UnaryBlock { +protected: + int64_t in_features; + int64_t out_features; + bool transpose_weight; + + void init_params(struct ggml_context* ctx, std::map& tensor_types, const std::string prefix = "") { + enum ggml_type wtype = tensor_types.find(prefix + "weight") != tensor_types.end() ? 
tensor_types[prefix + "weight"] : GGML_TYPE_F32; + if (transpose_weight) { + params["weight"] = ggml_new_tensor_2d(ctx, wtype, out_features, in_features); + } else { + params["weight"] = ggml_new_tensor_2d(ctx, wtype, in_features, out_features); + } + } + +public: + CLIPProjection(int64_t in_features, + int64_t out_features, + bool transpose_weight = false) + : in_features(in_features), + out_features(out_features), + transpose_weight(transpose_weight) {} + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + struct ggml_tensor* w = params["weight"]; + if (transpose_weight) { + w = ggml_cont(ctx, ggml_transpose(ctx, w)); + } + return ggml_nn_linear(ctx, x, w, NULL); + } +}; + +class CLIPVisionModelProjection : public GGMLBlock { +public: + int32_t hidden_size = 1024; + int32_t projection_dim = 768; + int32_t image_size = 224; + +public: + CLIPVisionModelProjection(CLIPVersion version = OPENAI_CLIP_VIT_L_14, + bool transpose_proj_w = false) { + if (version == OPEN_CLIP_VIT_H_14) { + hidden_size = 1280; + projection_dim = 1024; + } else if (version == OPEN_CLIP_VIT_BIGG_14) { + hidden_size = 1664; + } + + blocks["vision_model"] = std::shared_ptr(new CLIPVisionModel(version)); + blocks["visual_projection"] = std::shared_ptr(new CLIPProjection(hidden_size, projection_dim, transpose_proj_w)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* pixel_values) { + // pixel_values: [N, num_channels, image_size, image_size] + // return: [N, projection_dim] + auto vision_model = std::dynamic_pointer_cast(blocks["vision_model"]); + auto visual_projection = std::dynamic_pointer_cast(blocks["visual_projection"]); + + auto x = vision_model->forward(ctx, pixel_values); // [N, hidden_size] + x = visual_projection->forward(ctx, x); // [N, projection_dim] + + return x; // [N, projection_dim] + } +}; + +struct CLIPTextModelRunner : public GGMLRunner { + CLIPTextModel model; + + CLIPTextModelRunner(ggml_backend_t backend, + 
std::map& tensor_types, + const std::string prefix, + CLIPVersion version = OPENAI_CLIP_VIT_L_14, + bool with_final_ln = true, + int clip_skip_value = -1) + : GGMLRunner(backend), model(version, with_final_ln, clip_skip_value) { + model.init(params_ctx, tensor_types, prefix); + } + + std::string get_desc() { + return "clip"; + } + + void set_clip_skip(int clip_skip) { + model.set_clip_skip(clip_skip); + } + + void get_param_tensors(std::map& tensors, const std::string prefix) { + model.get_param_tensors(tensors, prefix); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* input_ids, + struct ggml_tensor* embeddings, + size_t max_token_idx = 0, + bool return_pooled = false) { + size_t N = input_ids->ne[1]; + size_t n_token = input_ids->ne[0]; + if (input_ids->ne[0] > model.n_token) { + GGML_ASSERT(input_ids->ne[0] % model.n_token == 0); + input_ids = ggml_reshape_2d(ctx, input_ids, model.n_token, input_ids->ne[0] / model.n_token); + } + + return model.forward(ctx, input_ids, embeddings, max_token_idx, return_pooled); + } + + struct ggml_cgraph* build_graph(struct ggml_tensor* input_ids, + int num_custom_embeddings = 0, + void* custom_embeddings_data = NULL, + size_t max_token_idx = 0, + bool return_pooled = false) { + struct ggml_cgraph* gf = ggml_new_graph(compute_ctx); + + input_ids = to_backend(input_ids); + + struct ggml_tensor* embeddings = NULL; + + if (num_custom_embeddings > 0 && custom_embeddings_data != NULL) { + auto token_embed_weight = model.get_token_embed_weight(); + auto custom_embeddings = ggml_new_tensor_2d(compute_ctx, + token_embed_weight->type, + model.hidden_size, + num_custom_embeddings); + set_backend_tensor_data(custom_embeddings, custom_embeddings_data); + + // concatenate custom embeddings + embeddings = ggml_concat(compute_ctx, token_embed_weight, custom_embeddings, 1); + } + + struct ggml_tensor* hidden_states = forward(compute_ctx, input_ids, embeddings, max_token_idx, return_pooled); + + 
ggml_build_forward_expand(gf, hidden_states); + + return gf; + } + + void compute(const int n_threads, + struct ggml_tensor* input_ids, + int num_custom_embeddings, + void* custom_embeddings_data, + size_t max_token_idx, + bool return_pooled, + ggml_tensor** output, + ggml_context* output_ctx = NULL) { + auto get_graph = [&]() -> struct ggml_cgraph* { + return build_graph(input_ids, num_custom_embeddings, custom_embeddings_data, max_token_idx, return_pooled); + }; + GGMLRunner::compute(get_graph, n_threads, true, output, output_ctx); + } +}; + +#endif // __CLIP_HPP__ diff --git a/common.hpp b/common.hpp new file mode 100644 index 000000000..9b5cc53be --- /dev/null +++ b/common.hpp @@ -0,0 +1,523 @@ +#ifndef __COMMON_HPP__ +#define __COMMON_HPP__ + +#include "ggml_extend.hpp" + +class DownSampleBlock : public GGMLBlock { +protected: + int channels; + int out_channels; + bool vae_downsample; + +public: + DownSampleBlock(int channels, + int out_channels, + bool vae_downsample = false) + : channels(channels), + out_channels(out_channels), + vae_downsample(vae_downsample) { + if (vae_downsample) { + blocks["conv"] = std::shared_ptr(new Conv2d(channels, out_channels, {3, 3}, {2, 2}, {0, 0})); + } else { + blocks["op"] = std::shared_ptr(new Conv2d(channels, out_channels, {3, 3}, {2, 2}, {1, 1})); + } + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [N, channels, h, w] + if (vae_downsample) { + auto conv = std::dynamic_pointer_cast(blocks["conv"]); + + x = ggml_pad(ctx, x, 1, 1, 0, 0); + x = conv->forward(ctx, x); + } else { + auto conv = std::dynamic_pointer_cast(blocks["op"]); + + x = conv->forward(ctx, x); + } + return x; // [N, out_channels, h/2, w/2] + } +}; + +class UpSampleBlock : public GGMLBlock { +protected: + int channels; + int out_channels; + +public: + UpSampleBlock(int channels, + int out_channels) + : channels(channels), + out_channels(out_channels) { + blocks["conv"] = std::shared_ptr(new Conv2d(channels, 
out_channels, {3, 3}, {1, 1}, {1, 1})); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [N, channels, h, w] + auto conv = std::dynamic_pointer_cast(blocks["conv"]); + + x = ggml_upscale(ctx, x, 2, GGML_SCALE_MODE_NEAREST); // [N, channels, h*2, w*2] + x = conv->forward(ctx, x); // [N, out_channels, h*2, w*2] + return x; + } +}; + +class ResBlock : public GGMLBlock { +protected: + // network hparams + int64_t channels; // model_channels * (1, 1, 1, 2, 2, 4, 4, 4) + int64_t emb_channels; // time_embed_dim + int64_t out_channels; // mult * model_channels + std::pair kernel_size; + int dims; + bool skip_t_emb; + bool exchange_temb_dims; + + std::shared_ptr conv_nd(int dims, + int64_t in_channels, + int64_t out_channels, + std::pair kernel_size, + std::pair padding) { + GGML_ASSERT(dims == 2 || dims == 3); + if (dims == 3) { + return std::shared_ptr(new Conv3dnx1x1(in_channels, out_channels, kernel_size.first, 1, padding.first)); + } else { + return std::shared_ptr(new Conv2d(in_channels, out_channels, kernel_size, {1, 1}, padding)); + } + } + +public: + ResBlock(int64_t channels, + int64_t emb_channels, + int64_t out_channels, + std::pair kernel_size = {3, 3}, + int dims = 2, + bool exchange_temb_dims = false, + bool skip_t_emb = false) + : channels(channels), + emb_channels(emb_channels), + out_channels(out_channels), + kernel_size(kernel_size), + dims(dims), + skip_t_emb(skip_t_emb), + exchange_temb_dims(exchange_temb_dims) { + std::pair padding = {kernel_size.first / 2, kernel_size.second / 2}; + blocks["in_layers.0"] = std::shared_ptr(new GroupNorm32(channels)); + // in_layer_1 is nn.SILU() + blocks["in_layers.2"] = conv_nd(dims, channels, out_channels, kernel_size, padding); + + if (!skip_t_emb) { + // emb_layer_0 is nn.SILU() + blocks["emb_layers.1"] = std::shared_ptr(new Linear(emb_channels, out_channels)); + } + + blocks["out_layers.0"] = std::shared_ptr(new GroupNorm32(out_channels)); + // out_layer_1 is nn.SILU() 
+ // out_layer_2 is nn.Dropout(), skip for inference + blocks["out_layers.3"] = conv_nd(dims, out_channels, out_channels, kernel_size, padding); + + if (out_channels != channels) { + blocks["skip_connection"] = conv_nd(dims, channels, out_channels, {1, 1}, {0, 0}); + } + } + + virtual struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x, struct ggml_tensor* emb = NULL) { + // For dims==3, we reduce dimension from 5d to 4d by merging h and w, in order not to change ggml + // [N, c, t, h, w] => [N, c, t, h * w] + // x: [N, channels, h, w] if dims == 2 else [N, channels, t, h, w] + // emb: [N, emb_channels] if dims == 2 else [N, t, emb_channels] + auto in_layers_0 = std::dynamic_pointer_cast(blocks["in_layers.0"]); + auto in_layers_2 = std::dynamic_pointer_cast(blocks["in_layers.2"]); + auto out_layers_0 = std::dynamic_pointer_cast(blocks["out_layers.0"]); + auto out_layers_3 = std::dynamic_pointer_cast(blocks["out_layers.3"]); + + if (emb == NULL) { + GGML_ASSERT(skip_t_emb); + } + + // in_layers + auto h = in_layers_0->forward(ctx, x); + h = ggml_silu_inplace(ctx, h); + h = in_layers_2->forward(ctx, h); // [N, out_channels, h, w] if dims == 2 else [N, out_channels, t, h, w] + + // emb_layers + if (!skip_t_emb) { + auto emb_layer_1 = std::dynamic_pointer_cast(blocks["emb_layers.1"]); + + auto emb_out = ggml_silu(ctx, emb); + emb_out = emb_layer_1->forward(ctx, emb_out); // [N, out_channels] if dims == 2 else [N, t, out_channels] + + if (dims == 2) { + emb_out = ggml_reshape_4d(ctx, emb_out, 1, 1, emb_out->ne[0], emb_out->ne[1]); // [N, out_channels, 1, 1] + } else { + emb_out = ggml_reshape_4d(ctx, emb_out, 1, emb_out->ne[0], emb_out->ne[1], emb_out->ne[2]); // [N, t, out_channels, 1] + if (exchange_temb_dims) { + // emb_out = rearrange(emb_out, "b t c ... 
-> b c t ...") + emb_out = ggml_cont(ctx, ggml_permute(ctx, emb_out, 0, 2, 1, 3)); // [N, out_channels, t, 1] + } + } + + h = ggml_add(ctx, h, emb_out); // [N, out_channels, h, w] if dims == 2 else [N, out_channels, t, h, w] + } + + // out_layers + h = out_layers_0->forward(ctx, h); + h = ggml_silu_inplace(ctx, h); + // dropout, skip for inference + h = out_layers_3->forward(ctx, h); + + // skip connection + if (out_channels != channels) { + auto skip_connection = std::dynamic_pointer_cast(blocks["skip_connection"]); + x = skip_connection->forward(ctx, x); // [N, out_channels, h, w] if dims == 2 else [N, out_channels, t, h, w] + } + + h = ggml_add(ctx, h, x); + return h; // [N, out_channels, h, w] if dims == 2 else [N, out_channels, t, h, w] + } +}; + +class GEGLU : public GGMLBlock { +protected: + int64_t dim_in; + int64_t dim_out; + + void init_params(struct ggml_context* ctx, std::map& tensor_types, std::string prefix = "") { + enum ggml_type wtype = (tensor_types.find(prefix + "proj.weight") != tensor_types.end()) ? tensor_types[prefix + "proj.weight"] : GGML_TYPE_F32; + enum ggml_type bias_wtype = GGML_TYPE_F32; //(tensor_types.find(prefix + "proj.bias") != tensor_types.end()) ? 
tensor_types[prefix + "proj.bias"] : GGML_TYPE_F32; + params["proj.weight"] = ggml_new_tensor_2d(ctx, wtype, dim_in, dim_out * 2); + params["proj.bias"] = ggml_new_tensor_1d(ctx, bias_wtype, dim_out * 2); + } + +public: + GEGLU(int64_t dim_in, int64_t dim_out) + : dim_in(dim_in), dim_out(dim_out) {} + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [ne3, ne2, ne1, dim_in] + // return: [ne3, ne2, ne1, dim_out] + struct ggml_tensor* w = params["proj.weight"]; + struct ggml_tensor* b = params["proj.bias"]; + + auto x_w = ggml_view_2d(ctx, w, w->ne[0], w->ne[1] / 2, w->nb[1], 0); // [dim_out, dim_in] + auto x_b = ggml_view_1d(ctx, b, b->ne[0] / 2, 0); // [dim_out, dim_in] + auto gate_w = ggml_view_2d(ctx, w, w->ne[0], w->ne[1] / 2, w->nb[1], w->nb[1] * w->ne[1] / 2); // [dim_out, ] + auto gate_b = ggml_view_1d(ctx, b, b->ne[0] / 2, b->nb[0] * b->ne[0] / 2); // [dim_out, ] + + auto x_in = x; + x = ggml_nn_linear(ctx, x_in, x_w, x_b); // [ne3, ne2, ne1, dim_out] + auto gate = ggml_nn_linear(ctx, x_in, gate_w, gate_b); // [ne3, ne2, ne1, dim_out] + + gate = ggml_gelu_inplace(ctx, gate); + + x = ggml_mul(ctx, x, gate); // [ne3, ne2, ne1, dim_out] + + return x; + } +}; + +class FeedForward : public GGMLBlock { +public: + FeedForward(int64_t dim, + int64_t dim_out, + int64_t mult = 4) { + int64_t inner_dim = dim * mult; + + blocks["net.0"] = std::shared_ptr(new GEGLU(dim, inner_dim)); + // net_1 is nn.Dropout(), skip for inference + blocks["net.2"] = std::shared_ptr(new Linear(inner_dim, dim_out)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [ne3, ne2, ne1, dim] + // return: [ne3, ne2, ne1, dim_out] + + auto net_0 = std::dynamic_pointer_cast(blocks["net.0"]); + auto net_2 = std::dynamic_pointer_cast(blocks["net.2"]); + + x = net_0->forward(ctx, x); // [ne3, ne2, ne1, inner_dim] + x = net_2->forward(ctx, x); // [ne3, ne2, ne1, dim_out] + return x; + } +}; + +class CrossAttention : 
public GGMLBlock { +protected: + int64_t query_dim; + int64_t context_dim; + int64_t n_head; + int64_t d_head; + bool flash_attn; + +public: + CrossAttention(int64_t query_dim, + int64_t context_dim, + int64_t n_head, + int64_t d_head, + bool flash_attn = false) + : n_head(n_head), + d_head(d_head), + query_dim(query_dim), + context_dim(context_dim), + flash_attn(flash_attn) { + int64_t inner_dim = d_head * n_head; + + blocks["to_q"] = std::shared_ptr(new Linear(query_dim, inner_dim, false)); + blocks["to_k"] = std::shared_ptr(new Linear(context_dim, inner_dim, false)); + blocks["to_v"] = std::shared_ptr(new Linear(context_dim, inner_dim, false)); + + blocks["to_out.0"] = std::shared_ptr(new Linear(inner_dim, query_dim)); + // to_out_1 is nn.Dropout(), skip for inference + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x, struct ggml_tensor* context) { + // x: [N, n_token, query_dim] + // context: [N, n_context, context_dim] + // return: [N, n_token, query_dim] + auto to_q = std::dynamic_pointer_cast(blocks["to_q"]); + auto to_k = std::dynamic_pointer_cast(blocks["to_k"]); + auto to_v = std::dynamic_pointer_cast(blocks["to_v"]); + auto to_out_0 = std::dynamic_pointer_cast(blocks["to_out.0"]); + + int64_t n = x->ne[2]; + int64_t n_token = x->ne[1]; + int64_t n_context = context->ne[1]; + int64_t inner_dim = d_head * n_head; + + auto q = to_q->forward(ctx, x); // [N, n_token, inner_dim] + auto k = to_k->forward(ctx, context); // [N, n_context, inner_dim] + auto v = to_v->forward(ctx, context); // [N, n_context, inner_dim] + + x = ggml_nn_attention_ext(ctx, q, k, v, n_head, NULL, false, false, flash_attn); // [N, n_token, inner_dim] + + x = to_out_0->forward(ctx, x); // [N, n_token, query_dim] + return x; + } +}; + +class BasicTransformerBlock : public GGMLBlock { +protected: + int64_t n_head; + int64_t d_head; + bool ff_in; + +public: + BasicTransformerBlock(int64_t dim, + int64_t n_head, + int64_t d_head, + int64_t context_dim, + 
bool ff_in = false, + bool flash_attn = false) + : n_head(n_head), d_head(d_head), ff_in(ff_in) { + // disable_self_attn is always False + // disable_temporal_crossattention is always False + // switch_temporal_ca_to_sa is always False + // inner_dim is always None or equal to dim + // gated_ff is always True + blocks["attn1"] = std::shared_ptr(new CrossAttention(dim, dim, n_head, d_head, flash_attn)); + blocks["attn2"] = std::shared_ptr(new CrossAttention(dim, context_dim, n_head, d_head, flash_attn)); + blocks["ff"] = std::shared_ptr(new FeedForward(dim, dim)); + blocks["norm1"] = std::shared_ptr(new LayerNorm(dim)); + blocks["norm2"] = std::shared_ptr(new LayerNorm(dim)); + blocks["norm3"] = std::shared_ptr(new LayerNorm(dim)); + + if (ff_in) { + blocks["norm_in"] = std::shared_ptr(new LayerNorm(dim)); + blocks["ff_in"] = std::shared_ptr(new FeedForward(dim, dim)); + } + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x, struct ggml_tensor* context) { + // x: [N, n_token, query_dim] + // context: [N, n_context, context_dim] + // return: [N, n_token, query_dim] + + auto attn1 = std::dynamic_pointer_cast(blocks["attn1"]); + auto attn2 = std::dynamic_pointer_cast(blocks["attn2"]); + auto ff = std::dynamic_pointer_cast(blocks["ff"]); + auto norm1 = std::dynamic_pointer_cast(blocks["norm1"]); + auto norm2 = std::dynamic_pointer_cast(blocks["norm2"]); + auto norm3 = std::dynamic_pointer_cast(blocks["norm3"]); + + if (ff_in) { + auto norm_in = std::dynamic_pointer_cast(blocks["norm_in"]); + auto ff_in = std::dynamic_pointer_cast(blocks["ff_in"]); + + auto x_skip = x; + x = norm_in->forward(ctx, x); + x = ff_in->forward(ctx, x); + // self.is_res is always True + x = ggml_add(ctx, x, x_skip); + } + + auto r = x; + x = norm1->forward(ctx, x); + x = attn1->forward(ctx, x, x); // self-attention + x = ggml_add(ctx, x, r); + r = x; + x = norm2->forward(ctx, x); + x = attn2->forward(ctx, x, context); // cross-attention + x = ggml_add(ctx, x, r); 
+ r = x; + x = norm3->forward(ctx, x); + x = ff->forward(ctx, x); + x = ggml_add(ctx, x, r); + + return x; + } +}; + +class SpatialTransformer : public GGMLBlock { +protected: + int64_t in_channels; // mult * model_channels + int64_t n_head; + int64_t d_head; + int64_t depth = 1; // 1 + int64_t context_dim = 768; // hidden_size, 1024 for VERSION_SD2 + +public: + SpatialTransformer(int64_t in_channels, + int64_t n_head, + int64_t d_head, + int64_t depth, + int64_t context_dim, + bool flash_attn = false) + : in_channels(in_channels), + n_head(n_head), + d_head(d_head), + depth(depth), + context_dim(context_dim) { + // We will convert unet transformer linear to conv2d 1x1 when loading the weights, so use_linear is always False + // disable_self_attn is always False + int64_t inner_dim = n_head * d_head; // in_channels + blocks["norm"] = std::shared_ptr(new GroupNorm32(in_channels)); + blocks["proj_in"] = std::shared_ptr(new Conv2d(in_channels, inner_dim, {1, 1})); + + for (int i = 0; i < depth; i++) { + std::string name = "transformer_blocks." 
+ std::to_string(i); + blocks[name] = std::shared_ptr(new BasicTransformerBlock(inner_dim, n_head, d_head, context_dim, false, flash_attn)); + } + + blocks["proj_out"] = std::shared_ptr(new Conv2d(inner_dim, in_channels, {1, 1})); + } + + virtual struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x, struct ggml_tensor* context) { + // x: [N, in_channels, h, w] + // context: [N, max_position(aka n_token), hidden_size(aka context_dim)] + auto norm = std::dynamic_pointer_cast(blocks["norm"]); + auto proj_in = std::dynamic_pointer_cast(blocks["proj_in"]); + auto proj_out = std::dynamic_pointer_cast(blocks["proj_out"]); + + auto x_in = x; + int64_t n = x->ne[3]; + int64_t h = x->ne[1]; + int64_t w = x->ne[0]; + int64_t inner_dim = n_head * d_head; + + x = norm->forward(ctx, x); + x = proj_in->forward(ctx, x); // [N, inner_dim, h, w] + + x = ggml_cont(ctx, ggml_permute(ctx, x, 1, 2, 0, 3)); // [N, h, w, inner_dim] + x = ggml_reshape_3d(ctx, x, inner_dim, w * h, n); // [N, h * w, inner_dim] + + for (int i = 0; i < depth; i++) { + std::string name = "transformer_blocks." + std::to_string(i); + auto transformer_block = std::dynamic_pointer_cast(blocks[name]); + + x = transformer_block->forward(ctx, x, context); + } + + x = ggml_cont(ctx, ggml_permute(ctx, x, 1, 0, 2, 3)); // [N, inner_dim, h * w] + x = ggml_reshape_4d(ctx, x, w, h, inner_dim, n); // [N, inner_dim, h, w] + + // proj_out + x = proj_out->forward(ctx, x); // [N, in_channels, h, w] + + x = ggml_add(ctx, x, x_in); + return x; + } +}; + +class AlphaBlender : public GGMLBlock { +protected: + void init_params(struct ggml_context* ctx, std::map& tensor_types, std::string prefix = "") { + // Get the type of the "mix_factor" tensor from the input tensors map with the specified prefix + enum ggml_type wtype = GGML_TYPE_F32; //(tensor_types.ypes.find(prefix + "mix_factor") != tensor_types.end()) ? 
tensor_types[prefix + "mix_factor"] : GGML_TYPE_F32; + params["mix_factor"] = ggml_new_tensor_1d(ctx, wtype, 1); + } + + float get_alpha() { + // image_only_indicator is always tensor([0.]) and since mix_factor.shape is [1,] + // so learned_with_images is same as learned + float alpha = ggml_backend_tensor_get_f32(params["mix_factor"]); + return sigmoid(alpha); + } + +public: + AlphaBlender() { + // merge_strategy is always learned_with_images + // for inference, we don't need to set alpha + // since mix_factor.shape is [1,], we don't need rearrange using rearrange_pattern + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* x_spatial, + struct ggml_tensor* x_temporal) { + // image_only_indicator is always tensor([0.]) + float alpha = get_alpha(); + auto x = ggml_add(ctx, + ggml_scale(ctx, x_spatial, alpha), + ggml_scale(ctx, x_temporal, 1.0f - alpha)); + return x; + } +}; + +class VideoResBlock : public ResBlock { +public: + VideoResBlock(int channels, + int emb_channels, + int out_channels, + std::pair kernel_size = {3, 3}, + int64_t video_kernel_size = 3, + int dims = 2) // always 2 + : ResBlock(channels, emb_channels, out_channels, kernel_size, dims) { + blocks["time_stack"] = std::shared_ptr(new ResBlock(out_channels, emb_channels, out_channels, kernel_size, 3, true)); + blocks["time_mixer"] = std::shared_ptr(new AlphaBlender()); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* emb, + int num_video_frames) { + // x: [N, channels, h, w] aka [b*t, channels, h, w] + // emb: [N, emb_channels] aka [b*t, emb_channels] + // image_only_indicator is always tensor([0.]) + auto time_stack = std::dynamic_pointer_cast(blocks["time_stack"]); + auto time_mixer = std::dynamic_pointer_cast(blocks["time_mixer"]); + + x = ResBlock::forward(ctx, x, emb); + + int64_t T = num_video_frames; + int64_t B = x->ne[3] / T; + int64_t C = x->ne[2]; + int64_t H = x->ne[1]; + int64_t W = x->ne[0]; + + 
x = ggml_reshape_4d(ctx, x, W * H, C, T, B); // (b t) c h w -> b t c (h w) + x = ggml_cont(ctx, ggml_permute(ctx, x, 0, 2, 1, 3)); // b t c (h w) -> b c t (h w) + auto x_mix = x; + + emb = ggml_reshape_4d(ctx, emb, emb->ne[0], T, B, emb->ne[3]); // (b t) ... -> b t ... + + x = time_stack->forward(ctx, x, emb); // b t c (h w) + + x = time_mixer->forward(ctx, x_mix, x); // b t c (h w) + + x = ggml_cont(ctx, ggml_permute(ctx, x, 0, 2, 1, 3)); // b c t (h w) -> b t c (h w) + x = ggml_reshape_4d(ctx, x, W, H, C, T * B); // b t c (h w) -> (b t) c h w + + return x; + } +}; + +#endif // __COMMON_HPP__ diff --git a/conditioner.hpp b/conditioner.hpp new file mode 100644 index 000000000..3f89d5263 --- /dev/null +++ b/conditioner.hpp @@ -0,0 +1,1428 @@ +#ifndef __CONDITIONER_HPP__ +#define __CONDITIONER_HPP__ + +#include "clip.hpp" +#include "t5.hpp" + +struct SDCondition { + struct ggml_tensor* c_crossattn = NULL; // aka context + struct ggml_tensor* c_vector = NULL; // aka y + struct ggml_tensor* c_concat = NULL; + + SDCondition() = default; + SDCondition(struct ggml_tensor* c_crossattn, struct ggml_tensor* c_vector, struct ggml_tensor* c_concat) + : c_crossattn(c_crossattn), c_vector(c_vector), c_concat(c_concat) {} +}; + +struct Conditioner { + virtual SDCondition get_learned_condition(ggml_context* work_ctx, + int n_threads, + const std::string& text, + int clip_skip, + int width, + int height, + int adm_in_channels = -1, + bool force_zero_embeddings = false) = 0; + virtual void alloc_params_buffer() = 0; + virtual void free_params_buffer() = 0; + virtual void get_param_tensors(std::map& tensors) = 0; + virtual size_t get_params_buffer_size() = 0; + virtual std::tuple> get_learned_condition_with_trigger(ggml_context* work_ctx, + int n_threads, + const std::string& text, + int clip_skip, + int width, + int height, + int num_input_imgs, + int adm_in_channels = -1, + bool force_zero_embeddings = false) = 0; + virtual std::string remove_trigger_from_prompt(ggml_context* 
work_ctx, + const std::string& prompt) = 0; +}; + +// ldm.modules.encoders.modules.FrozenCLIPEmbedder +// Ref: https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/cad87bf4e3e0b0a759afa94e933527c3123d59bc/modules/sd_hijack_clip.py#L283 +struct FrozenCLIPEmbedderWithCustomWords : public Conditioner { + SDVersion version = VERSION_SD1; + PMVersion pm_version = PM_VERSION_1; + CLIPTokenizer tokenizer; + std::shared_ptr text_model; + std::shared_ptr text_model2; + + std::string trigger_word = "img"; // should be user settable + std::string embd_dir; + int32_t num_custom_embeddings = 0; + int32_t num_custom_embeddings_2 = 0; + std::vector token_embed_custom; + std::vector readed_embeddings; + + FrozenCLIPEmbedderWithCustomWords(ggml_backend_t backend, + std::map& tensor_types, + const std::string& embd_dir, + SDVersion version = VERSION_SD1, + PMVersion pv = PM_VERSION_1, + int clip_skip = -1) + : version(version), pm_version(pv), tokenizer(sd_version_is_sd2(version) ? 0 : 49407), embd_dir(embd_dir) { + if (sd_version_is_sd1(version)) { + text_model = std::make_shared(backend, tensor_types, "cond_stage_model.transformer.text_model", OPENAI_CLIP_VIT_L_14); + } else if (sd_version_is_sd2(version)) { + text_model = std::make_shared(backend, tensor_types, "cond_stage_model.transformer.text_model", OPEN_CLIP_VIT_H_14); + } else if (sd_version_is_sdxl(version)) { + text_model = std::make_shared(backend, tensor_types, "cond_stage_model.transformer.text_model", OPENAI_CLIP_VIT_L_14, false); + text_model2 = std::make_shared(backend, tensor_types, "cond_stage_model.1.transformer.text_model", OPEN_CLIP_VIT_BIGG_14, false); + } + set_clip_skip(clip_skip); + } + + void set_clip_skip(int clip_skip) { + if (clip_skip <= 0) { + clip_skip = 1; + if (sd_version_is_sd2(version) || sd_version_is_sdxl(version)) { + clip_skip = 2; + } + } + text_model->set_clip_skip(clip_skip); + if (sd_version_is_sdxl(version)) { + text_model2->set_clip_skip(clip_skip); + } + } + + void 
get_param_tensors(std::map& tensors) { + text_model->get_param_tensors(tensors, "cond_stage_model.transformer.text_model"); + if (sd_version_is_sdxl(version)) { + text_model2->get_param_tensors(tensors, "cond_stage_model.1.transformer.text_model"); + } + } + + void alloc_params_buffer() { + text_model->alloc_params_buffer(); + if (sd_version_is_sdxl(version)) { + text_model2->alloc_params_buffer(); + } + } + + void free_params_buffer() { + text_model->free_params_buffer(); + if (sd_version_is_sdxl(version)) { + text_model2->free_params_buffer(); + } + } + + size_t get_params_buffer_size() { + size_t buffer_size = text_model->get_params_buffer_size(); + if (sd_version_is_sdxl(version)) { + buffer_size += text_model2->get_params_buffer_size(); + } + return buffer_size; + } + + bool load_embedding(std::string embd_name, std::string embd_path, std::vector& bpe_tokens) { + // the order matters + ModelLoader model_loader; + if (!model_loader.init_from_file(embd_path)) { + LOG_ERROR("embedding '%s' failed", embd_name.c_str()); + return false; + } + if (std::find(readed_embeddings.begin(), readed_embeddings.end(), embd_name) != readed_embeddings.end()) { + LOG_DEBUG("embedding already read in: %s", embd_name.c_str()); + return true; + } + struct ggml_init_params params; + params.mem_size = 10 * 1024 * 1024; // max for custom embeddings 10 MB + params.mem_buffer = NULL; + params.no_alloc = false; + struct ggml_context* embd_ctx = ggml_init(params); + struct ggml_tensor* embd = NULL; + struct ggml_tensor* embd2 = NULL; + auto on_load = [&](const TensorStorage& tensor_storage, ggml_tensor** dst_tensor) { + if (tensor_storage.ne[0] != text_model->model.hidden_size) { + if (text_model2) { + if (tensor_storage.ne[0] == text_model2->model.hidden_size) { + embd2 = ggml_new_tensor_2d(embd_ctx, tensor_storage.type, text_model2->model.hidden_size, tensor_storage.n_dims > 1 ? 
tensor_storage.ne[1] : 1); + *dst_tensor = embd2; + } else { + LOG_DEBUG("embedding wrong hidden size, got %i, expected %i or %i", tensor_storage.ne[0], text_model->model.hidden_size, text_model2->model.hidden_size); + return false; + } + } else { + LOG_DEBUG("embedding wrong hidden size, got %i, expected %i", tensor_storage.ne[0], text_model->model.hidden_size); + return false; + } + } else { + embd = ggml_new_tensor_2d(embd_ctx, tensor_storage.type, text_model->model.hidden_size, tensor_storage.n_dims > 1 ? tensor_storage.ne[1] : 1); + *dst_tensor = embd; + } + return true; + }; + model_loader.load_tensors(on_load, NULL); + readed_embeddings.push_back(embd_name); + if (embd) { + int64_t hidden_size = text_model->model.hidden_size; + token_embed_custom.resize(token_embed_custom.size() + ggml_nbytes(embd)); + memcpy((void*)(token_embed_custom.data() + num_custom_embeddings * hidden_size * ggml_type_size(embd->type)), + embd->data, + ggml_nbytes(embd)); + for (int i = 0; i < embd->ne[1]; i++) { + bpe_tokens.push_back(text_model->model.vocab_size + num_custom_embeddings); + // LOG_DEBUG("new custom token: %i", text_model.vocab_size + num_custom_embeddings); + num_custom_embeddings++; + } + LOG_DEBUG("embedding '%s' applied, custom embeddings: %i", embd_name.c_str(), num_custom_embeddings); + } + if (embd2) { + int64_t hidden_size = text_model2->model.hidden_size; + token_embed_custom.resize(token_embed_custom.size() + ggml_nbytes(embd2)); + memcpy((void*)(token_embed_custom.data() + num_custom_embeddings_2 * hidden_size * ggml_type_size(embd2->type)), + embd2->data, + ggml_nbytes(embd2)); + for (int i = 0; i < embd2->ne[1]; i++) { + bpe_tokens.push_back(text_model2->model.vocab_size + num_custom_embeddings_2); + // LOG_DEBUG("new custom token: %i", text_model.vocab_size + num_custom_embeddings); + num_custom_embeddings_2++; + } + LOG_DEBUG("embedding '%s' applied, custom embeddings: %i (text model 2)", embd_name.c_str(), num_custom_embeddings_2); + } + return true; + 
} + + std::tuple, std::vector, std::vector> + tokenize_with_trigger_token(std::string text, + int num_input_imgs, + int32_t image_token, + bool padding = false) { + return tokenize_with_trigger_token(text, num_input_imgs, image_token, + text_model->model.n_token, padding); + } + + std::vector convert_token_to_id(std::string text) { + auto on_new_token_cb = [&](std::string& str, std::vector& bpe_tokens) -> bool { + size_t word_end = str.find(","); + std::string embd_name = word_end == std::string::npos ? str : str.substr(0, word_end); + embd_name = trim(embd_name); + std::string embd_path = get_full_path(embd_dir, embd_name + ".pt"); + if (embd_path.size() == 0) { + embd_path = get_full_path(embd_dir, embd_name + ".ckpt"); + } + if (embd_path.size() == 0) { + embd_path = get_full_path(embd_dir, embd_name + ".safetensors"); + } + if (embd_path.size() > 0) { + if (load_embedding(embd_name, embd_path, bpe_tokens)) { + if (word_end != std::string::npos) { + str = str.substr(word_end); + } else { + str = ""; + } + return true; + } + } + return false; + }; + std::vector curr_tokens = tokenizer.encode(text, on_new_token_cb); + return curr_tokens; + } + + std::string decode(const std::vector& tokens) { + return tokenizer.decode(tokens); + } + + std::tuple, std::vector, std::vector> + tokenize_with_trigger_token(std::string text, + int num_input_imgs, + int32_t image_token, + size_t max_length = 0, + bool padding = false) { + auto parsed_attention = parse_prompt_attention(text); + + { + std::stringstream ss; + ss << "["; + for (const auto& item : parsed_attention) { + ss << "['" << item.first << "', " << item.second << "], "; + } + ss << "]"; + LOG_DEBUG("parse '%s' to %s", text.c_str(), ss.str().c_str()); + } + + auto on_new_token_cb = [&](std::string& str, std::vector& bpe_tokens) -> bool { + size_t word_end = str.find(","); + std::string embd_name = word_end == std::string::npos ? 
str : str.substr(0, word_end); + embd_name = trim(embd_name); + std::string embd_path = get_full_path(embd_dir, embd_name + ".pt"); + if (embd_path.size() == 0) { + embd_path = get_full_path(embd_dir, embd_name + ".ckpt"); + } + if (embd_path.size() == 0) { + embd_path = get_full_path(embd_dir, embd_name + ".safetensors"); + } + if (embd_path.size() > 0) { + if (load_embedding(embd_name, embd_path, bpe_tokens)) { + if (word_end != std::string::npos) { + str = str.substr(word_end); + } else { + str = ""; + } + return true; + } + } + return false; + }; + + std::vector tokens; + std::vector weights; + std::vector class_token_mask; + int32_t class_idx = -1, tokens_acc = 0; + for (const auto& item : parsed_attention) { + std::vector class_token_index; + std::vector clean_input_ids; + const std::string& curr_text = item.first; + float curr_weight = item.second; + // printf(" %s: %f \n", curr_text.c_str(), curr_weight); + std::vector curr_tokens = tokenizer.encode(curr_text, on_new_token_cb); + int32_t clean_index = 0; + for (uint32_t i = 0; i < curr_tokens.size(); i++) { + int token_id = curr_tokens[i]; + if (token_id == image_token) + class_token_index.push_back(clean_index - 1); + else { + clean_input_ids.push_back(token_id); + clean_index++; + } + } + // GGML_ASSERT(class_token_index.size() == 1); // PhotoMaker currently does not support multiple + // trigger words in a single prompt. + if (class_token_index.size() == 1) { + // Expand the class word token and corresponding mask + int class_token = clean_input_ids[class_token_index[0]]; + class_idx = tokens_acc + class_token_index[0]; + std::vector clean_input_ids_tmp; + for (uint32_t i = 0; i < class_token_index[0]; i++) + clean_input_ids_tmp.push_back(clean_input_ids[i]); + for (uint32_t i = 0; i < (pm_version == PM_VERSION_2 ? 
2 * num_input_imgs : num_input_imgs); i++) + clean_input_ids_tmp.push_back(class_token); + for (uint32_t i = class_token_index[0] + 1; i < clean_input_ids.size(); i++) + clean_input_ids_tmp.push_back(clean_input_ids[i]); + clean_input_ids.clear(); + clean_input_ids = clean_input_ids_tmp; + } + tokens_acc += clean_index; + tokens.insert(tokens.end(), clean_input_ids.begin(), clean_input_ids.end()); + weights.insert(weights.end(), clean_input_ids.size(), curr_weight); + } + // BUG!! double couting, pad_tokens will add BOS at the beginning + // tokens.insert(tokens.begin(), tokenizer.BOS_TOKEN_ID); + // weights.insert(weights.begin(), 1.0); + + tokenizer.pad_tokens(tokens, weights, max_length, padding); + int offset = pm_version == PM_VERSION_2 ? 2 * num_input_imgs : num_input_imgs; + for (uint32_t i = 0; i < tokens.size(); i++) { + // if (class_idx + 1 <= i && i < class_idx + 1 + 2*num_input_imgs) // photomaker V2 has num_tokens(=2)*num_input_imgs + if (class_idx + 1 <= i && i < class_idx + 1 + offset) // photomaker V2 has num_tokens(=2)*num_input_imgs + // hardcode for now + class_token_mask.push_back(true); + else + class_token_mask.push_back(false); + } + + // printf("["); + // for (int i = 0; i < tokens.size(); i++) { + // printf("%d, ", class_token_mask[i] ? 
1 : 0); + // } + // printf("]\n"); + + // for (int i = 0; i < tokens.size(); i++) { + // std::cout << tokens[i] << ":" << weights[i] << ", "; + // } + // std::cout << std::endl; + + return std::make_tuple(tokens, weights, class_token_mask); + } + + std::pair, std::vector> tokenize(std::string text, + bool padding = false) { + return tokenize(text, text_model->model.n_token, padding); + } + + std::pair, std::vector> tokenize(std::string text, + size_t max_length = 0, + bool padding = false) { + auto parsed_attention = parse_prompt_attention(text); + + { + std::stringstream ss; + ss << "["; + for (const auto& item : parsed_attention) { + ss << "['" << item.first << "', " << item.second << "], "; + } + ss << "]"; + LOG_DEBUG("parse '%s' to %s", text.c_str(), ss.str().c_str()); + } + + auto on_new_token_cb = [&](std::string& str, std::vector& bpe_tokens) -> bool { + size_t word_end = str.find(","); + std::string embd_name = word_end == std::string::npos ? str : str.substr(0, word_end); + embd_name = trim(embd_name); + std::string embd_path = get_full_path(embd_dir, embd_name + ".pt"); + if (embd_path.size() == 0) { + embd_path = get_full_path(embd_dir, embd_name + ".ckpt"); + } + if (embd_path.size() == 0) { + embd_path = get_full_path(embd_dir, embd_name + ".safetensors"); + } + if (embd_path.size() > 0) { + if (load_embedding(embd_name, embd_path, bpe_tokens)) { + if (word_end != std::string::npos) { + str = str.substr(word_end); + } else { + str = ""; + } + return true; + } + } + return false; + }; + + std::vector tokens; + std::vector weights; + for (const auto& item : parsed_attention) { + const std::string& curr_text = item.first; + float curr_weight = item.second; + std::vector curr_tokens = tokenizer.encode(curr_text, on_new_token_cb); + tokens.insert(tokens.end(), curr_tokens.begin(), curr_tokens.end()); + weights.insert(weights.end(), curr_tokens.size(), curr_weight); + } + + tokenizer.pad_tokens(tokens, weights, max_length, padding); + + // for (int i = 0; i 
< tokens.size(); i++) { + // std::cout << tokens[i] << ":" << weights[i] << ", "; + // } + // std::cout << std::endl; + + return {tokens, weights}; + } + + SDCondition get_learned_condition_common(ggml_context* work_ctx, + int n_threads, + std::vector& tokens, + std::vector& weights, + int clip_skip, + int width, + int height, + int adm_in_channels = -1, + bool force_zero_embeddings = false) { + set_clip_skip(clip_skip); + int64_t t0 = ggml_time_ms(); + struct ggml_tensor* hidden_states = NULL; // [N, n_token, hidden_size] + struct ggml_tensor* chunk_hidden_states = NULL; // [n_token, hidden_size] or [n_token, hidden_size + hidden_size2] + struct ggml_tensor* chunk_hidden_states1 = NULL; // [n_token, hidden_size] + struct ggml_tensor* chunk_hidden_states2 = NULL; // [n_token, hidden_size2] + struct ggml_tensor* pooled = NULL; + std::vector hidden_states_vec; + + size_t chunk_len = 77; + size_t chunk_count = tokens.size() / chunk_len; + for (int chunk_idx = 0; chunk_idx < chunk_count; chunk_idx++) { + std::vector chunk_tokens(tokens.begin() + chunk_idx * chunk_len, + tokens.begin() + (chunk_idx + 1) * chunk_len); + std::vector chunk_weights(weights.begin() + chunk_idx * chunk_len, + weights.begin() + (chunk_idx + 1) * chunk_len); + + auto input_ids = vector_to_ggml_tensor_i32(work_ctx, chunk_tokens); + struct ggml_tensor* input_ids2 = NULL; + size_t max_token_idx = 0; + if (sd_version_is_sdxl(version)) { + auto it = std::find(chunk_tokens.begin(), chunk_tokens.end(), tokenizer.EOS_TOKEN_ID); + if (it != chunk_tokens.end()) { + std::fill(std::next(it), chunk_tokens.end(), 0); + } + + max_token_idx = std::min(std::distance(chunk_tokens.begin(), it), chunk_tokens.size() - 1); + + input_ids2 = vector_to_ggml_tensor_i32(work_ctx, chunk_tokens); + + // for (int i = 0; i < chunk_tokens.size(); i++) { + // printf("%d ", chunk_tokens[i]); + // } + // printf("\n"); + } + + { + text_model->compute(n_threads, + input_ids, + num_custom_embeddings, + token_embed_custom.data(), + 
max_token_idx, + false, + &chunk_hidden_states1, + work_ctx); + if (sd_version_is_sdxl(version)) { + text_model2->compute(n_threads, + input_ids2, + num_custom_embeddings, + token_embed_custom.data(), + max_token_idx, + false, + &chunk_hidden_states2, work_ctx); + // concat + chunk_hidden_states = ggml_tensor_concat(work_ctx, chunk_hidden_states1, chunk_hidden_states2, 0); + + if (chunk_idx == 0) { + text_model2->compute(n_threads, + input_ids2, + num_custom_embeddings, + token_embed_custom.data(), + max_token_idx, + true, + &pooled, + work_ctx); + } + } else { + chunk_hidden_states = chunk_hidden_states1; + } + } + + int64_t t1 = ggml_time_ms(); + LOG_DEBUG("computing condition graph completed, taking %" PRId64 " ms", t1 - t0); + ggml_tensor* result = ggml_dup_tensor(work_ctx, chunk_hidden_states); + { + float original_mean = ggml_tensor_mean(chunk_hidden_states); + for (int i2 = 0; i2 < chunk_hidden_states->ne[2]; i2++) { + for (int i1 = 0; i1 < chunk_hidden_states->ne[1]; i1++) { + for (int i0 = 0; i0 < chunk_hidden_states->ne[0]; i0++) { + float value = ggml_tensor_get_f32(chunk_hidden_states, i0, i1, i2); + value *= chunk_weights[i1]; + ggml_tensor_set_f32(result, value, i0, i1, i2); + } + } + } + float new_mean = ggml_tensor_mean(result); + ggml_tensor_scale(result, (original_mean / new_mean)); + } + if (force_zero_embeddings) { + float* vec = (float*)result->data; + for (int i = 0; i < ggml_nelements(result); i++) { + vec[i] = 0; + } + } + hidden_states_vec.insert(hidden_states_vec.end(), (float*)result->data, ((float*)result->data) + ggml_nelements(result)); + } + + hidden_states = vector_to_ggml_tensor(work_ctx, hidden_states_vec); + hidden_states = ggml_reshape_2d(work_ctx, + hidden_states, + chunk_hidden_states->ne[0], + ggml_nelements(hidden_states) / chunk_hidden_states->ne[0]); + + ggml_tensor* vec = NULL; + if (sd_version_is_sdxl(version)) { + int out_dim = 256; + vec = ggml_new_tensor_1d(work_ctx, GGML_TYPE_F32, adm_in_channels); + // [0:1280] + 
size_t offset = 0; + memcpy(vec->data, pooled->data, ggml_nbytes(pooled)); + offset += ggml_nbytes(pooled); + + // original_size_as_tuple + float orig_width = (float)width; + float orig_height = (float)height; + std::vector timesteps = {orig_height, orig_width}; + + ggml_tensor* embed_view = ggml_view_2d(work_ctx, vec, out_dim, 2, ggml_type_size(GGML_TYPE_F32) * out_dim, offset); + offset += ggml_nbytes(embed_view); + set_timestep_embedding(timesteps, embed_view, out_dim); + // print_ggml_tensor(ggml_reshape_1d(work_ctx, embed_view, out_dim * 2)); + // crop_coords_top_left + float crop_coord_top = 0.f; + float crop_coord_left = 0.f; + timesteps = {crop_coord_top, crop_coord_left}; + embed_view = ggml_view_2d(work_ctx, vec, out_dim, 2, ggml_type_size(GGML_TYPE_F32) * out_dim, offset); + offset += ggml_nbytes(embed_view); + set_timestep_embedding(timesteps, embed_view, out_dim); + // print_ggml_tensor(ggml_reshape_1d(work_ctx, embed_view, out_dim * 2)); + // target_size_as_tuple + float target_width = (float)width; + float target_height = (float)height; + timesteps = {target_height, target_width}; + embed_view = ggml_view_2d(work_ctx, vec, out_dim, 2, ggml_type_size(GGML_TYPE_F32) * out_dim, offset); + offset += ggml_nbytes(embed_view); + set_timestep_embedding(timesteps, embed_view, out_dim); + // print_ggml_tensor(ggml_reshape_1d(work_ctx, embed_view, out_dim * 2)); + GGML_ASSERT(offset == ggml_nbytes(vec)); + } + // print_ggml_tensor(result); + return SDCondition(hidden_states, vec, NULL); + } + + std::tuple> + get_learned_condition_with_trigger(ggml_context* work_ctx, + int n_threads, + const std::string& text, + int clip_skip, + int width, + int height, + int num_input_imgs, + int adm_in_channels = -1, + bool force_zero_embeddings = false) { + auto image_tokens = convert_token_to_id(trigger_word); + // if(image_tokens.size() == 1){ + // printf(" image token id is: %d \n", image_tokens[0]); + // } + GGML_ASSERT(image_tokens.size() == 1); + auto tokens_and_weights 
= tokenize_with_trigger_token(text, + num_input_imgs, + image_tokens[0], + true); + std::vector& tokens = std::get<0>(tokens_and_weights); + std::vector& weights = std::get<1>(tokens_and_weights); + std::vector& clsm = std::get<2>(tokens_and_weights); + // printf("tokens: \n"); + // for(int i = 0; i < tokens.size(); ++i) + // printf("%d ", tokens[i]); + // printf("\n"); + // printf("clsm: \n"); + // for(int i = 0; i < clsm.size(); ++i) + // printf("%d ", clsm[i]?1:0); + // printf("\n"); + auto cond = get_learned_condition_common(work_ctx, n_threads, tokens, weights, clip_skip, width, height, adm_in_channels, force_zero_embeddings); + return std::make_tuple(cond, clsm); + } + + std::string remove_trigger_from_prompt(ggml_context* work_ctx, + const std::string& prompt) { + auto image_tokens = convert_token_to_id(trigger_word); + GGML_ASSERT(image_tokens.size() == 1); + auto tokens_and_weights = tokenize(prompt, false); + std::vector& tokens = tokens_and_weights.first; + auto it = std::find(tokens.begin(), tokens.end(), image_tokens[0]); + GGML_ASSERT(it != tokens.end()); // prompt must have trigger word + tokens.erase(it); + return decode(tokens); + } + + SDCondition get_learned_condition(ggml_context* work_ctx, + int n_threads, + const std::string& text, + int clip_skip, + int width, + int height, + int adm_in_channels = -1, + bool force_zero_embeddings = false) { + auto tokens_and_weights = tokenize(text, true); + std::vector& tokens = tokens_and_weights.first; + std::vector& weights = tokens_and_weights.second; + return get_learned_condition_common(work_ctx, n_threads, tokens, weights, clip_skip, width, height, adm_in_channels, force_zero_embeddings); + } +}; + +struct FrozenCLIPVisionEmbedder : public GGMLRunner { + CLIPVisionModelProjection vision_model; + + FrozenCLIPVisionEmbedder(ggml_backend_t backend, std::map& tensor_types) + : vision_model(OPEN_CLIP_VIT_H_14, true), GGMLRunner(backend) { + vision_model.init(params_ctx, tensor_types, 
"cond_stage_model.transformer"); + } + + std::string get_desc() { + return "clip_vision"; + } + + void get_param_tensors(std::map& tensors) { + vision_model.get_param_tensors(tensors, "cond_stage_model.transformer"); + } + + struct ggml_cgraph* build_graph(struct ggml_tensor* pixel_values) { + struct ggml_cgraph* gf = ggml_new_graph(compute_ctx); + + pixel_values = to_backend(pixel_values); + + struct ggml_tensor* hidden_states = vision_model.forward(compute_ctx, pixel_values); + + ggml_build_forward_expand(gf, hidden_states); + + return gf; + } + + void compute(const int n_threads, + ggml_tensor* pixel_values, + ggml_tensor** output, + ggml_context* output_ctx) { + auto get_graph = [&]() -> struct ggml_cgraph* { + return build_graph(pixel_values); + }; + GGMLRunner::compute(get_graph, n_threads, true, output, output_ctx); + } +}; + +struct SD3CLIPEmbedder : public Conditioner { + CLIPTokenizer clip_l_tokenizer; + CLIPTokenizer clip_g_tokenizer; + T5UniGramTokenizer t5_tokenizer; + std::shared_ptr clip_l; + std::shared_ptr clip_g; + std::shared_ptr t5; + + SD3CLIPEmbedder(ggml_backend_t backend, + std::map& tensor_types, + int clip_skip = -1) + : clip_g_tokenizer(0) { + clip_l = std::make_shared(backend, tensor_types, "text_encoders.clip_l.transformer.text_model", OPENAI_CLIP_VIT_L_14, false); + clip_g = std::make_shared(backend, tensor_types, "text_encoders.clip_g.transformer.text_model", OPEN_CLIP_VIT_BIGG_14, false); + t5 = std::make_shared(backend, tensor_types, "text_encoders.t5xxl.transformer"); + set_clip_skip(clip_skip); + } + + void set_clip_skip(int clip_skip) { + if (clip_skip <= 0) { + clip_skip = 2; + } + clip_l->set_clip_skip(clip_skip); + clip_g->set_clip_skip(clip_skip); + } + + void get_param_tensors(std::map& tensors) { + clip_l->get_param_tensors(tensors, "text_encoders.clip_l.transformer.text_model"); + clip_g->get_param_tensors(tensors, "text_encoders.clip_g.transformer.text_model"); + t5->get_param_tensors(tensors, 
"text_encoders.t5xxl.transformer"); + } + + void alloc_params_buffer() { + clip_l->alloc_params_buffer(); + clip_g->alloc_params_buffer(); + t5->alloc_params_buffer(); + } + + void free_params_buffer() { + clip_l->free_params_buffer(); + clip_g->free_params_buffer(); + t5->free_params_buffer(); + } + + size_t get_params_buffer_size() { + size_t buffer_size = clip_l->get_params_buffer_size(); + buffer_size += clip_g->get_params_buffer_size(); + buffer_size += t5->get_params_buffer_size(); + return buffer_size; + } + + std::vector, std::vector>> tokenize(std::string text, + size_t max_length = 0, + bool padding = false) { + auto parsed_attention = parse_prompt_attention(text); + + { + std::stringstream ss; + ss << "["; + for (const auto& item : parsed_attention) { + ss << "['" << item.first << "', " << item.second << "], "; + } + ss << "]"; + LOG_DEBUG("parse '%s' to %s", text.c_str(), ss.str().c_str()); + } + + auto on_new_token_cb = [&](std::string& str, std::vector& bpe_tokens) -> bool { + return false; + }; + + std::vector clip_l_tokens; + std::vector clip_l_weights; + std::vector clip_g_tokens; + std::vector clip_g_weights; + std::vector t5_tokens; + std::vector t5_weights; + for (const auto& item : parsed_attention) { + const std::string& curr_text = item.first; + float curr_weight = item.second; + + std::vector curr_tokens = clip_l_tokenizer.encode(curr_text, on_new_token_cb); + clip_l_tokens.insert(clip_l_tokens.end(), curr_tokens.begin(), curr_tokens.end()); + clip_l_weights.insert(clip_l_weights.end(), curr_tokens.size(), curr_weight); + + curr_tokens = clip_g_tokenizer.encode(curr_text, on_new_token_cb); + clip_g_tokens.insert(clip_g_tokens.end(), curr_tokens.begin(), curr_tokens.end()); + clip_g_weights.insert(clip_g_weights.end(), curr_tokens.size(), curr_weight); + + curr_tokens = t5_tokenizer.Encode(curr_text, true); + t5_tokens.insert(t5_tokens.end(), curr_tokens.begin(), curr_tokens.end()); + t5_weights.insert(t5_weights.end(), curr_tokens.size(), 
curr_weight); + } + + clip_l_tokenizer.pad_tokens(clip_l_tokens, clip_l_weights, max_length, padding); + clip_g_tokenizer.pad_tokens(clip_g_tokens, clip_g_weights, max_length, padding); + t5_tokenizer.pad_tokens(t5_tokens, t5_weights, NULL, max_length, padding); + + // for (int i = 0; i < clip_l_tokens.size(); i++) { + // std::cout << clip_l_tokens[i] << ":" << clip_l_weights[i] << ", "; + // } + // std::cout << std::endl; + + // for (int i = 0; i < clip_g_tokens.size(); i++) { + // std::cout << clip_g_tokens[i] << ":" << clip_g_weights[i] << ", "; + // } + // std::cout << std::endl; + + // for (int i = 0; i < t5_tokens.size(); i++) { + // std::cout << t5_tokens[i] << ":" << t5_weights[i] << ", "; + // } + // std::cout << std::endl; + + return {{clip_l_tokens, clip_l_weights}, {clip_g_tokens, clip_g_weights}, {t5_tokens, t5_weights}}; + } + + SDCondition get_learned_condition_common(ggml_context* work_ctx, + int n_threads, + std::vector, std::vector>> token_and_weights, + int clip_skip, + bool force_zero_embeddings = false) { + set_clip_skip(clip_skip); + auto& clip_l_tokens = token_and_weights[0].first; + auto& clip_l_weights = token_and_weights[0].second; + auto& clip_g_tokens = token_and_weights[1].first; + auto& clip_g_weights = token_and_weights[1].second; + auto& t5_tokens = token_and_weights[2].first; + auto& t5_weights = token_and_weights[2].second; + + int64_t t0 = ggml_time_ms(); + struct ggml_tensor* hidden_states = NULL; // [N, n_token*2, 4096] + struct ggml_tensor* chunk_hidden_states = NULL; // [n_token*2, 4096] + struct ggml_tensor* chunk_hidden_states_l = NULL; // [n_token, hidden_size_l] + struct ggml_tensor* chunk_hidden_states_g = NULL; // [n_token, hidden_size_g] + struct ggml_tensor* chunk_hidden_states_t5 = NULL; // [n_token, hidden_size_t5] + struct ggml_tensor* pooled = NULL; + struct ggml_tensor* pooled_l = NULL; // [768,] + struct ggml_tensor* pooled_g = NULL; // [1280,] + std::vector hidden_states_vec; + + size_t chunk_len = 77; + size_t 
chunk_count = clip_l_tokens.size() / chunk_len; + for (int chunk_idx = 0; chunk_idx < chunk_count; chunk_idx++) { + // clip_l + { + std::vector chunk_tokens(clip_l_tokens.begin() + chunk_idx * chunk_len, + clip_l_tokens.begin() + (chunk_idx + 1) * chunk_len); + std::vector chunk_weights(clip_l_weights.begin() + chunk_idx * chunk_len, + clip_l_weights.begin() + (chunk_idx + 1) * chunk_len); + + auto input_ids = vector_to_ggml_tensor_i32(work_ctx, chunk_tokens); + size_t max_token_idx = 0; + + clip_l->compute(n_threads, + input_ids, + 0, + NULL, + max_token_idx, + false, + &chunk_hidden_states_l, + work_ctx); + { + auto tensor = chunk_hidden_states_l; + float original_mean = ggml_tensor_mean(tensor); + for (int i2 = 0; i2 < tensor->ne[2]; i2++) { + for (int i1 = 0; i1 < tensor->ne[1]; i1++) { + for (int i0 = 0; i0 < tensor->ne[0]; i0++) { + float value = ggml_tensor_get_f32(tensor, i0, i1, i2); + value *= chunk_weights[i1]; + ggml_tensor_set_f32(tensor, value, i0, i1, i2); + } + } + } + float new_mean = ggml_tensor_mean(tensor); + ggml_tensor_scale(tensor, (original_mean / new_mean)); + } + + if (chunk_idx == 0) { + auto it = std::find(chunk_tokens.begin(), chunk_tokens.end(), clip_l_tokenizer.EOS_TOKEN_ID); + max_token_idx = std::min(std::distance(chunk_tokens.begin(), it), chunk_tokens.size() - 1); + clip_l->compute(n_threads, + input_ids, + 0, + NULL, + max_token_idx, + true, + &pooled_l, + work_ctx); + } + } + + // clip_g + { + std::vector chunk_tokens(clip_g_tokens.begin() + chunk_idx * chunk_len, + clip_g_tokens.begin() + (chunk_idx + 1) * chunk_len); + std::vector chunk_weights(clip_g_weights.begin() + chunk_idx * chunk_len, + clip_g_weights.begin() + (chunk_idx + 1) * chunk_len); + + auto input_ids = vector_to_ggml_tensor_i32(work_ctx, chunk_tokens); + size_t max_token_idx = 0; + + clip_g->compute(n_threads, + input_ids, + 0, + NULL, + max_token_idx, + false, + &chunk_hidden_states_g, + work_ctx); + + { + auto tensor = chunk_hidden_states_g; + float 
original_mean = ggml_tensor_mean(tensor); + for (int i2 = 0; i2 < tensor->ne[2]; i2++) { + for (int i1 = 0; i1 < tensor->ne[1]; i1++) { + for (int i0 = 0; i0 < tensor->ne[0]; i0++) { + float value = ggml_tensor_get_f32(tensor, i0, i1, i2); + value *= chunk_weights[i1]; + ggml_tensor_set_f32(tensor, value, i0, i1, i2); + } + } + } + float new_mean = ggml_tensor_mean(tensor); + ggml_tensor_scale(tensor, (original_mean / new_mean)); + } + + if (chunk_idx == 0) { + auto it = std::find(chunk_tokens.begin(), chunk_tokens.end(), clip_g_tokenizer.EOS_TOKEN_ID); + max_token_idx = std::min(std::distance(chunk_tokens.begin(), it), chunk_tokens.size() - 1); + clip_g->compute(n_threads, + input_ids, + 0, + NULL, + max_token_idx, + true, + &pooled_g, + work_ctx); + } + } + + // t5 + { + std::vector chunk_tokens(t5_tokens.begin() + chunk_idx * chunk_len, + t5_tokens.begin() + (chunk_idx + 1) * chunk_len); + std::vector chunk_weights(t5_weights.begin() + chunk_idx * chunk_len, + t5_weights.begin() + (chunk_idx + 1) * chunk_len); + + auto input_ids = vector_to_ggml_tensor_i32(work_ctx, chunk_tokens); + + t5->compute(n_threads, + input_ids, + NULL, + &chunk_hidden_states_t5, + work_ctx); + { + auto tensor = chunk_hidden_states_t5; + float original_mean = ggml_tensor_mean(tensor); + for (int i2 = 0; i2 < tensor->ne[2]; i2++) { + for (int i1 = 0; i1 < tensor->ne[1]; i1++) { + for (int i0 = 0; i0 < tensor->ne[0]; i0++) { + float value = ggml_tensor_get_f32(tensor, i0, i1, i2); + value *= chunk_weights[i1]; + ggml_tensor_set_f32(tensor, value, i0, i1, i2); + } + } + } + float new_mean = ggml_tensor_mean(tensor); + ggml_tensor_scale(tensor, (original_mean / new_mean)); + } + } + + auto chunk_hidden_states_lg_pad = ggml_new_tensor_3d(work_ctx, + chunk_hidden_states_l->type, + 4096, + chunk_hidden_states_l->ne[1], + chunk_hidden_states_l->ne[2]); // [n_token, 4096] + + for (int i2 = 0; i2 < chunk_hidden_states_lg_pad->ne[2]; i2++) { + for (int i1 = 0; i1 < 
chunk_hidden_states_lg_pad->ne[1]; i1++) { + for (int i0 = 0; i0 < chunk_hidden_states_lg_pad->ne[0]; i0++) { + float value = 0.f; + if (i0 < chunk_hidden_states_l->ne[0]) { + value = ggml_tensor_get_f32(chunk_hidden_states_l, i0, i1, i2); + } else if (i0 < chunk_hidden_states_l->ne[0] + chunk_hidden_states_g->ne[0]) { + value = ggml_tensor_get_f32(chunk_hidden_states_g, i0 - chunk_hidden_states_l->ne[0], i1, i2); + } + ggml_tensor_set_f32(chunk_hidden_states_lg_pad, value, i0, i1, i2); + } + } + } + + chunk_hidden_states = ggml_tensor_concat(work_ctx, chunk_hidden_states_lg_pad, chunk_hidden_states_t5, 1); // [n_token*2, 4096] + + if (chunk_idx == 0) { + pooled = ggml_tensor_concat(work_ctx, pooled_l, pooled_g, 0); // [768 + 1280] + } + + int64_t t1 = ggml_time_ms(); + LOG_DEBUG("computing condition graph completed, taking %" PRId64 " ms", t1 - t0); + if (force_zero_embeddings) { + float* vec = (float*)chunk_hidden_states->data; + for (int i = 0; i < ggml_nelements(chunk_hidden_states); i++) { + vec[i] = 0; + } + } + + hidden_states_vec.insert(hidden_states_vec.end(), + (float*)chunk_hidden_states->data, + ((float*)chunk_hidden_states->data) + ggml_nelements(chunk_hidden_states)); + } + + hidden_states = vector_to_ggml_tensor(work_ctx, hidden_states_vec); + hidden_states = ggml_reshape_2d(work_ctx, + hidden_states, + chunk_hidden_states->ne[0], + ggml_nelements(hidden_states) / chunk_hidden_states->ne[0]); + return SDCondition(hidden_states, pooled, NULL); + } + + SDCondition get_learned_condition(ggml_context* work_ctx, + int n_threads, + const std::string& text, + int clip_skip, + int width, + int height, + int adm_in_channels = -1, + bool force_zero_embeddings = false) { + auto tokens_and_weights = tokenize(text, 77, true); + return get_learned_condition_common(work_ctx, n_threads, tokens_and_weights, clip_skip, force_zero_embeddings); + } + + std::tuple> get_learned_condition_with_trigger(ggml_context* work_ctx, + int n_threads, + const std::string& text, + 
int clip_skip, + int width, + int height, + int num_input_imgs, + int adm_in_channels = -1, + bool force_zero_embeddings = false) { + GGML_ASSERT(0 && "Not implemented yet!"); + } + + std::string remove_trigger_from_prompt(ggml_context* work_ctx, + const std::string& prompt) { + GGML_ASSERT(0 && "Not implemented yet!"); + } +}; + +struct FluxCLIPEmbedder : public Conditioner { + CLIPTokenizer clip_l_tokenizer; + T5UniGramTokenizer t5_tokenizer; + std::shared_ptr clip_l; + std::shared_ptr t5; + size_t chunk_len = 256; + + FluxCLIPEmbedder(ggml_backend_t backend, + std::map& tensor_types, + int clip_skip = -1) { + clip_l = std::make_shared(backend, tensor_types, "text_encoders.clip_l.transformer.text_model", OPENAI_CLIP_VIT_L_14, true); + t5 = std::make_shared(backend, tensor_types, "text_encoders.t5xxl.transformer"); + set_clip_skip(clip_skip); + } + + void set_clip_skip(int clip_skip) { + if (clip_skip <= 0) { + clip_skip = 2; + } + clip_l->set_clip_skip(clip_skip); + } + + void get_param_tensors(std::map& tensors) { + clip_l->get_param_tensors(tensors, "text_encoders.clip_l.transformer.text_model"); + t5->get_param_tensors(tensors, "text_encoders.t5xxl.transformer"); + } + + void alloc_params_buffer() { + clip_l->alloc_params_buffer(); + t5->alloc_params_buffer(); + } + + void free_params_buffer() { + clip_l->free_params_buffer(); + t5->free_params_buffer(); + } + + size_t get_params_buffer_size() { + size_t buffer_size = clip_l->get_params_buffer_size(); + buffer_size += t5->get_params_buffer_size(); + return buffer_size; + } + + std::vector, std::vector>> tokenize(std::string text, + size_t max_length = 0, + bool padding = false) { + auto parsed_attention = parse_prompt_attention(text); + + { + std::stringstream ss; + ss << "["; + for (const auto& item : parsed_attention) { + ss << "['" << item.first << "', " << item.second << "], "; + } + ss << "]"; + LOG_DEBUG("parse '%s' to %s", text.c_str(), ss.str().c_str()); + } + + auto on_new_token_cb = [&](std::string& 
str, std::vector& bpe_tokens) -> bool { + return false; + }; + + std::vector clip_l_tokens; + std::vector clip_l_weights; + std::vector t5_tokens; + std::vector t5_weights; + for (const auto& item : parsed_attention) { + const std::string& curr_text = item.first; + float curr_weight = item.second; + + std::vector curr_tokens = clip_l_tokenizer.encode(curr_text, on_new_token_cb); + clip_l_tokens.insert(clip_l_tokens.end(), curr_tokens.begin(), curr_tokens.end()); + clip_l_weights.insert(clip_l_weights.end(), curr_tokens.size(), curr_weight); + + curr_tokens = t5_tokenizer.Encode(curr_text, true); + t5_tokens.insert(t5_tokens.end(), curr_tokens.begin(), curr_tokens.end()); + t5_weights.insert(t5_weights.end(), curr_tokens.size(), curr_weight); + } + + clip_l_tokenizer.pad_tokens(clip_l_tokens, clip_l_weights, 77, padding); + t5_tokenizer.pad_tokens(t5_tokens, t5_weights, NULL, max_length, padding); + + // for (int i = 0; i < clip_l_tokens.size(); i++) { + // std::cout << clip_l_tokens[i] << ":" << clip_l_weights[i] << ", "; + // } + // std::cout << std::endl; + + // for (int i = 0; i < t5_tokens.size(); i++) { + // std::cout << t5_tokens[i] << ":" << t5_weights[i] << ", "; + // } + // std::cout << std::endl; + + return {{clip_l_tokens, clip_l_weights}, {t5_tokens, t5_weights}}; + } + + SDCondition get_learned_condition_common(ggml_context* work_ctx, + int n_threads, + std::vector, std::vector>> token_and_weights, + int clip_skip, + bool force_zero_embeddings = false) { + set_clip_skip(clip_skip); + auto& clip_l_tokens = token_and_weights[0].first; + auto& clip_l_weights = token_and_weights[0].second; + auto& t5_tokens = token_and_weights[1].first; + auto& t5_weights = token_and_weights[1].second; + + int64_t t0 = ggml_time_ms(); + struct ggml_tensor* hidden_states = NULL; // [N, n_token, 4096] + struct ggml_tensor* chunk_hidden_states = NULL; // [n_token, 4096] + struct ggml_tensor* pooled = NULL; // [768,] + std::vector hidden_states_vec; + + size_t chunk_count = 
t5_tokens.size() / chunk_len; + for (int chunk_idx = 0; chunk_idx < chunk_count; chunk_idx++) { + // clip_l + if (chunk_idx == 0) { + size_t chunk_len_l = 77; + std::vector chunk_tokens(clip_l_tokens.begin(), + clip_l_tokens.begin() + chunk_len_l); + std::vector chunk_weights(clip_l_weights.begin(), + clip_l_weights.begin() + chunk_len_l); + + auto input_ids = vector_to_ggml_tensor_i32(work_ctx, chunk_tokens); + size_t max_token_idx = 0; + + auto it = std::find(chunk_tokens.begin(), chunk_tokens.end(), clip_l_tokenizer.EOS_TOKEN_ID); + max_token_idx = std::min(std::distance(chunk_tokens.begin(), it), chunk_tokens.size() - 1); + + clip_l->compute(n_threads, + input_ids, + 0, + NULL, + max_token_idx, + true, + &pooled, + work_ctx); + } + + // t5 + { + std::vector chunk_tokens(t5_tokens.begin() + chunk_idx * chunk_len, + t5_tokens.begin() + (chunk_idx + 1) * chunk_len); + std::vector chunk_weights(t5_weights.begin() + chunk_idx * chunk_len, + t5_weights.begin() + (chunk_idx + 1) * chunk_len); + + auto input_ids = vector_to_ggml_tensor_i32(work_ctx, chunk_tokens); + + t5->compute(n_threads, + input_ids, + NULL, + &chunk_hidden_states, + work_ctx); + { + auto tensor = chunk_hidden_states; + float original_mean = ggml_tensor_mean(tensor); + for (int i2 = 0; i2 < tensor->ne[2]; i2++) { + for (int i1 = 0; i1 < tensor->ne[1]; i1++) { + for (int i0 = 0; i0 < tensor->ne[0]; i0++) { + float value = ggml_tensor_get_f32(tensor, i0, i1, i2); + value *= chunk_weights[i1]; + ggml_tensor_set_f32(tensor, value, i0, i1, i2); + } + } + } + float new_mean = ggml_tensor_mean(tensor); + ggml_tensor_scale(tensor, (original_mean / new_mean)); + } + } + + int64_t t1 = ggml_time_ms(); + LOG_DEBUG("computing condition graph completed, taking %" PRId64 " ms", t1 - t0); + if (force_zero_embeddings) { + float* vec = (float*)chunk_hidden_states->data; + for (int i = 0; i < ggml_nelements(chunk_hidden_states); i++) { + vec[i] = 0; + } + } + + hidden_states_vec.insert(hidden_states_vec.end(), + 
(float*)chunk_hidden_states->data, + ((float*)chunk_hidden_states->data) + ggml_nelements(chunk_hidden_states)); + } + + hidden_states = vector_to_ggml_tensor(work_ctx, hidden_states_vec); + hidden_states = ggml_reshape_2d(work_ctx, + hidden_states, + chunk_hidden_states->ne[0], + ggml_nelements(hidden_states) / chunk_hidden_states->ne[0]); + return SDCondition(hidden_states, pooled, NULL); + } + + SDCondition get_learned_condition(ggml_context* work_ctx, + int n_threads, + const std::string& text, + int clip_skip, + int width, + int height, + int adm_in_channels = -1, + bool force_zero_embeddings = false) { + auto tokens_and_weights = tokenize(text, chunk_len, true); + return get_learned_condition_common(work_ctx, n_threads, tokens_and_weights, clip_skip, force_zero_embeddings); + } + + std::tuple> get_learned_condition_with_trigger(ggml_context* work_ctx, + int n_threads, + const std::string& text, + int clip_skip, + int width, + int height, + int num_input_imgs, + int adm_in_channels = -1, + bool force_zero_embeddings = false) { + GGML_ASSERT(0 && "Not implemented yet!"); + } + + std::string remove_trigger_from_prompt(ggml_context* work_ctx, + const std::string& prompt) { + GGML_ASSERT(0 && "Not implemented yet!"); + } +}; + +struct PixArtCLIPEmbedder : public Conditioner { + T5UniGramTokenizer t5_tokenizer; + std::shared_ptr t5; + size_t chunk_len = 512; + bool use_mask = false; + int mask_pad = 1; + + PixArtCLIPEmbedder(ggml_backend_t backend, + std::map& tensor_types, + int clip_skip = -1, + bool use_mask = false, + int mask_pad = 1) + : use_mask(use_mask), mask_pad(mask_pad) { + t5 = std::make_shared(backend, tensor_types, "text_encoders.t5xxl.transformer"); + } + + void set_clip_skip(int clip_skip) { + } + + void get_param_tensors(std::map& tensors) { + t5->get_param_tensors(tensors, "text_encoders.t5xxl.transformer"); + } + + void alloc_params_buffer() { + t5->alloc_params_buffer(); + } + + void free_params_buffer() { + t5->free_params_buffer(); + } + + 
size_t get_params_buffer_size() { + size_t buffer_size = 0; + + buffer_size += t5->get_params_buffer_size(); + + return buffer_size; + } + + std::tuple, std::vector, std::vector> tokenize(std::string text, + size_t max_length = 0, + bool padding = false) { + auto parsed_attention = parse_prompt_attention(text); + + { + std::stringstream ss; + ss << "["; + for (const auto& item : parsed_attention) { + ss << "['" << item.first << "', " << item.second << "], "; + } + ss << "]"; + LOG_DEBUG("parse '%s' to %s", text.c_str(), ss.str().c_str()); + } + + auto on_new_token_cb = [&](std::string& str, std::vector& bpe_tokens) -> bool { + return false; + }; + + std::vector t5_tokens; + std::vector t5_weights; + std::vector t5_mask; + for (const auto& item : parsed_attention) { + const std::string& curr_text = item.first; + float curr_weight = item.second; + + std::vector curr_tokens = t5_tokenizer.Encode(curr_text, true); + t5_tokens.insert(t5_tokens.end(), curr_tokens.begin(), curr_tokens.end()); + t5_weights.insert(t5_weights.end(), curr_tokens.size(), curr_weight); + } + + t5_tokenizer.pad_tokens(t5_tokens, t5_weights, &t5_mask, max_length, padding); + + return {t5_tokens, t5_weights, t5_mask}; + } + + void modify_mask_to_attend_padding(struct ggml_tensor* mask, int max_seq_length, int num_extra_padding = 8) { + float* mask_data = (float*)mask->data; + int num_pad = 0; + for (int64_t i = 0; i < max_seq_length; i++) { + if (num_pad >= num_extra_padding) { + break; + } + if (std::isinf(mask_data[i])) { + mask_data[i] = 0; + ++num_pad; + } + } + // LOG_DEBUG("PAD: %d", num_pad); + } + + SDCondition get_learned_condition_common(ggml_context* work_ctx, + int n_threads, + std::tuple, std::vector, std::vector> token_and_weights, + int clip_skip, + bool force_zero_embeddings = false) { + auto& t5_tokens = std::get<0>(token_and_weights); + auto& t5_weights = std::get<1>(token_and_weights); + auto& t5_attn_mask_vec = std::get<2>(token_and_weights); + + int64_t t0 = ggml_time_ms(); + 
struct ggml_tensor* hidden_states = NULL; // [N, n_token, 4096] + struct ggml_tensor* chunk_hidden_states = NULL; // [n_token, 4096] + struct ggml_tensor* pooled = NULL; // [768,] + struct ggml_tensor* t5_attn_mask = vector_to_ggml_tensor(work_ctx, t5_attn_mask_vec); // [768,] + + std::vector hidden_states_vec; + + size_t chunk_count = t5_tokens.size() / chunk_len; + + for (int chunk_idx = 0; chunk_idx < chunk_count; chunk_idx++) { + // t5 + std::vector chunk_tokens(t5_tokens.begin() + chunk_idx * chunk_len, + t5_tokens.begin() + (chunk_idx + 1) * chunk_len); + std::vector chunk_weights(t5_weights.begin() + chunk_idx * chunk_len, + t5_weights.begin() + (chunk_idx + 1) * chunk_len); + std::vector chunk_mask(t5_attn_mask_vec.begin() + chunk_idx * chunk_len, + t5_attn_mask_vec.begin() + (chunk_idx + 1) * chunk_len); + + auto input_ids = vector_to_ggml_tensor_i32(work_ctx, chunk_tokens); + auto t5_attn_mask_chunk = use_mask ? vector_to_ggml_tensor(work_ctx, chunk_mask) : NULL; + + t5->compute(n_threads, + input_ids, + t5_attn_mask_chunk, + &chunk_hidden_states, + work_ctx); + { + auto tensor = chunk_hidden_states; + float original_mean = ggml_tensor_mean(tensor); + for (int i2 = 0; i2 < tensor->ne[2]; i2++) { + for (int i1 = 0; i1 < tensor->ne[1]; i1++) { + for (int i0 = 0; i0 < tensor->ne[0]; i0++) { + float value = ggml_tensor_get_f32(tensor, i0, i1, i2); + value *= chunk_weights[i1]; + ggml_tensor_set_f32(tensor, value, i0, i1, i2); + } + } + } + float new_mean = ggml_tensor_mean(tensor); + ggml_tensor_scale(tensor, (original_mean / new_mean)); + } + + int64_t t1 = ggml_time_ms(); + LOG_DEBUG("computing condition graph completed, taking %" PRId64 " ms", t1 - t0); + if (force_zero_embeddings) { + float* vec = (float*)chunk_hidden_states->data; + for (int i = 0; i < ggml_nelements(chunk_hidden_states); i++) { + vec[i] = 0; + } + } + + hidden_states_vec.insert(hidden_states_vec.end(), + (float*)chunk_hidden_states->data, + ((float*)chunk_hidden_states->data) + 
ggml_nelements(chunk_hidden_states)); + } + + if (hidden_states_vec.size() > 0) { + hidden_states = vector_to_ggml_tensor(work_ctx, hidden_states_vec); + hidden_states = ggml_reshape_2d(work_ctx, + hidden_states, + chunk_hidden_states->ne[0], + ggml_nelements(hidden_states) / chunk_hidden_states->ne[0]); + } else { + hidden_states = ggml_new_tensor_2d(work_ctx, GGML_TYPE_F32, 4096, 256); + ggml_set_f32(hidden_states, 0.f); + } + + modify_mask_to_attend_padding(t5_attn_mask, ggml_nelements(t5_attn_mask), mask_pad); + + return SDCondition(hidden_states, t5_attn_mask, NULL); + } + + SDCondition get_learned_condition(ggml_context* work_ctx, + int n_threads, + const std::string& text, + int clip_skip, + int width, + int height, + int adm_in_channels = -1, + bool force_zero_embeddings = false) { + auto tokens_and_weights = tokenize(text, chunk_len, true); + return get_learned_condition_common(work_ctx, n_threads, tokens_and_weights, clip_skip, force_zero_embeddings); + } + + std::tuple> get_learned_condition_with_trigger(ggml_context* work_ctx, + int n_threads, + const std::string& text, + int clip_skip, + int width, + int height, + int num_input_imgs, + int adm_in_channels = -1, + bool force_zero_embeddings = false) { + GGML_ASSERT(0 && "Not implemented yet!"); + } + + std::string remove_trigger_from_prompt(ggml_context* work_ctx, + const std::string& prompt) { + GGML_ASSERT(0 && "Not implemented yet!"); + } +}; + +#endif diff --git a/control.hpp b/control.hpp new file mode 100644 index 000000000..23b75feff --- /dev/null +++ b/control.hpp @@ -0,0 +1,458 @@ +#ifndef __CONTROL_HPP__ +#define __CONTROL_HPP__ + +#include "common.hpp" +#include "ggml_extend.hpp" +#include "model.h" + +#define CONTROL_NET_GRAPH_SIZE 1536 + +/* + =================================== ControlNet =================================== + Reference: https://github.com/comfyanonymous/ComfyUI/blob/master/comfy/cldm/cldm.py + +*/ +class ControlNetBlock : public GGMLBlock { +protected: + SDVersion version 
= VERSION_SD1; + // network hparams + int in_channels = 4; + int out_channels = 4; + int hint_channels = 3; + int num_res_blocks = 2; + std::vector attention_resolutions = {4, 2, 1}; + std::vector channel_mult = {1, 2, 4, 4}; + std::vector transformer_depth = {1, 1, 1, 1}; + int time_embed_dim = 1280; // model_channels*4 + int num_heads = 8; + int num_head_channels = -1; // channels // num_heads + int context_dim = 768; // 1024 for VERSION_SD2, 2048 for VERSION_SDXL + +public: + int model_channels = 320; + int adm_in_channels = 2816; // only for VERSION_SDXL + + ControlNetBlock(SDVersion version = VERSION_SD1) + : version(version) { + if (sd_version_is_sd2(version)) { + context_dim = 1024; + num_head_channels = 64; + num_heads = -1; + } else if (sd_version_is_sdxl(version)) { + context_dim = 2048; + attention_resolutions = {4, 2}; + channel_mult = {1, 2, 4}; + transformer_depth = {1, 2, 10}; + num_head_channels = 64; + num_heads = -1; + } else if (version == VERSION_SVD) { + in_channels = 8; + out_channels = 4; + context_dim = 1024; + adm_in_channels = 768; + num_head_channels = 64; + num_heads = -1; + } + + blocks["time_embed.0"] = std::shared_ptr(new Linear(model_channels, time_embed_dim)); + // time_embed_1 is nn.SiLU() + blocks["time_embed.2"] = std::shared_ptr(new Linear(time_embed_dim, time_embed_dim)); + + if (sd_version_is_sdxl(version) || version == VERSION_SVD) { + blocks["label_emb.0.0"] = std::shared_ptr(new Linear(adm_in_channels, time_embed_dim)); + // label_emb_1 is nn.SiLU() + blocks["label_emb.0.2"] = std::shared_ptr(new Linear(time_embed_dim, time_embed_dim)); + } + + // input_blocks + blocks["input_blocks.0.0"] = std::shared_ptr(new Conv2d(in_channels, model_channels, {3, 3}, {1, 1}, {1, 1})); + + std::vector input_block_chans; + input_block_chans.push_back(model_channels); + int ch = model_channels; + int input_block_idx = 0; + int ds = 1; + + auto get_resblock = [&](int64_t channels, int64_t emb_channels, int64_t out_channels) -> ResBlock* { + 
return new ResBlock(channels, emb_channels, out_channels); + }; + + auto get_attention_layer = [&](int64_t in_channels, + int64_t n_head, + int64_t d_head, + int64_t depth, + int64_t context_dim) -> SpatialTransformer* { + return new SpatialTransformer(in_channels, n_head, d_head, depth, context_dim); + }; + + auto make_zero_conv = [&](int64_t channels) { + return new Conv2d(channels, channels, {1, 1}); + }; + + blocks["zero_convs.0.0"] = std::shared_ptr(make_zero_conv(model_channels)); + + blocks["input_hint_block.0"] = std::shared_ptr(new Conv2d(hint_channels, 16, {3, 3}, {1, 1}, {1, 1})); + // nn.SiLU() + blocks["input_hint_block.2"] = std::shared_ptr(new Conv2d(16, 16, {3, 3}, {1, 1}, {1, 1})); + // nn.SiLU() + blocks["input_hint_block.4"] = std::shared_ptr(new Conv2d(16, 32, {3, 3}, {2, 2}, {1, 1})); + // nn.SiLU() + blocks["input_hint_block.6"] = std::shared_ptr(new Conv2d(32, 32, {3, 3}, {1, 1}, {1, 1})); + // nn.SiLU() + blocks["input_hint_block.8"] = std::shared_ptr(new Conv2d(32, 96, {3, 3}, {2, 2}, {1, 1})); + // nn.SiLU() + blocks["input_hint_block.10"] = std::shared_ptr(new Conv2d(96, 96, {3, 3}, {1, 1}, {1, 1})); + // nn.SiLU() + blocks["input_hint_block.12"] = std::shared_ptr(new Conv2d(96, 256, {3, 3}, {2, 2}, {1, 1})); + // nn.SiLU() + blocks["input_hint_block.14"] = std::shared_ptr(new Conv2d(256, model_channels, {3, 3}, {1, 1}, {1, 1})); + + size_t len_mults = channel_mult.size(); + for (int i = 0; i < len_mults; i++) { + int mult = channel_mult[i]; + for (int j = 0; j < num_res_blocks; j++) { + input_block_idx += 1; + std::string name = "input_blocks." 
+ std::to_string(input_block_idx) + ".0"; + blocks[name] = std::shared_ptr(get_resblock(ch, time_embed_dim, mult * model_channels)); + + ch = mult * model_channels; + if (std::find(attention_resolutions.begin(), attention_resolutions.end(), ds) != attention_resolutions.end()) { + int n_head = num_heads; + int d_head = ch / num_heads; + if (num_head_channels != -1) { + d_head = num_head_channels; + n_head = ch / d_head; + } + std::string name = "input_blocks." + std::to_string(input_block_idx) + ".1"; + blocks[name] = std::shared_ptr(get_attention_layer(ch, + n_head, + d_head, + transformer_depth[i], + context_dim)); + } + blocks["zero_convs." + std::to_string(input_block_idx) + ".0"] = std::shared_ptr(make_zero_conv(ch)); + input_block_chans.push_back(ch); + } + if (i != len_mults - 1) { + input_block_idx += 1; + std::string name = "input_blocks." + std::to_string(input_block_idx) + ".0"; + blocks[name] = std::shared_ptr(new DownSampleBlock(ch, ch)); + + blocks["zero_convs." + std::to_string(input_block_idx) + ".0"] = std::shared_ptr(make_zero_conv(ch)); + + input_block_chans.push_back(ch); + ds *= 2; + } + } + + // middle blocks + int n_head = num_heads; + int d_head = ch / num_heads; + if (num_head_channels != -1) { + d_head = num_head_channels; + n_head = ch / d_head; + } + blocks["middle_block.0"] = std::shared_ptr(get_resblock(ch, time_embed_dim, ch)); + blocks["middle_block.1"] = std::shared_ptr(get_attention_layer(ch, + n_head, + d_head, + transformer_depth[transformer_depth.size() - 1], + context_dim)); + blocks["middle_block.2"] = std::shared_ptr(get_resblock(ch, time_embed_dim, ch)); + + // middle_block_out + blocks["middle_block_out.0"] = std::shared_ptr(make_zero_conv(ch)); + } + + struct ggml_tensor* resblock_forward(std::string name, + struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* emb) { + auto block = std::dynamic_pointer_cast(blocks[name]); + return block->forward(ctx, x, emb); + } + + struct ggml_tensor* 
attention_layer_forward(std::string name, + struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* context) { + auto block = std::dynamic_pointer_cast(blocks[name]); + return block->forward(ctx, x, context); + } + + struct ggml_tensor* input_hint_block_forward(struct ggml_context* ctx, + struct ggml_tensor* hint, + struct ggml_tensor* emb, + struct ggml_tensor* context) { + int num_input_blocks = 15; + auto h = hint; + for (int i = 0; i < num_input_blocks; i++) { + if (i % 2 == 0) { + auto block = std::dynamic_pointer_cast(blocks["input_hint_block." + std::to_string(i)]); + + h = block->forward(ctx, h); + } else { + h = ggml_silu_inplace(ctx, h); + } + } + return h; + } + + std::vector forward(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* hint, + struct ggml_tensor* guided_hint, + struct ggml_tensor* timesteps, + struct ggml_tensor* context, + struct ggml_tensor* y = NULL) { + // x: [N, in_channels, h, w] or [N, in_channels/2, h, w] + // timesteps: [N,] + // context: [N, max_position, hidden_size] or [1, max_position, hidden_size]. 
for example, [N, 77, 768] + // y: [N, adm_in_channels] or [1, adm_in_channels] + if (context != NULL) { + if (context->ne[2] != x->ne[3]) { + context = ggml_repeat(ctx, context, ggml_new_tensor_3d(ctx, GGML_TYPE_F32, context->ne[0], context->ne[1], x->ne[3])); + } + } + + if (y != NULL) { + if (y->ne[1] != x->ne[3]) { + y = ggml_repeat(ctx, y, ggml_new_tensor_2d(ctx, GGML_TYPE_F32, y->ne[0], x->ne[3])); + } + } + + auto time_embed_0 = std::dynamic_pointer_cast(blocks["time_embed.0"]); + auto time_embed_2 = std::dynamic_pointer_cast(blocks["time_embed.2"]); + auto input_blocks_0_0 = std::dynamic_pointer_cast(blocks["input_blocks.0.0"]); + auto zero_convs_0 = std::dynamic_pointer_cast(blocks["zero_convs.0.0"]); + + auto middle_block_out = std::dynamic_pointer_cast(blocks["middle_block_out.0"]); + + auto t_emb = ggml_nn_timestep_embedding(ctx, timesteps, model_channels); // [N, model_channels] + + auto emb = time_embed_0->forward(ctx, t_emb); + emb = ggml_silu_inplace(ctx, emb); + emb = time_embed_2->forward(ctx, emb); // [N, time_embed_dim] + + // SDXL/SVD + if (y != NULL) { + auto label_embed_0 = std::dynamic_pointer_cast(blocks["label_emb.0.0"]); + auto label_embed_2 = std::dynamic_pointer_cast(blocks["label_emb.0.2"]); + + auto label_emb = label_embed_0->forward(ctx, y); + label_emb = ggml_silu_inplace(ctx, label_emb); + label_emb = label_embed_2->forward(ctx, label_emb); // [N, time_embed_dim] + + emb = ggml_add(ctx, emb, label_emb); // [N, time_embed_dim] + } + + std::vector outs; + + if (guided_hint == NULL) { + guided_hint = input_hint_block_forward(ctx, hint, emb, context); + } + outs.push_back(guided_hint); + + // input_blocks + + // input block 0 + auto h = input_blocks_0_0->forward(ctx, x); + h = ggml_add(ctx, h, guided_hint); + outs.push_back(zero_convs_0->forward(ctx, h)); + + // input block 1-11 + size_t len_mults = channel_mult.size(); + int input_block_idx = 0; + int ds = 1; + for (int i = 0; i < len_mults; i++) { + int mult = channel_mult[i]; + for 
(int j = 0; j < num_res_blocks; j++) { + input_block_idx += 1; + std::string name = "input_blocks." + std::to_string(input_block_idx) + ".0"; + h = resblock_forward(name, ctx, h, emb); // [N, mult*model_channels, h, w] + if (std::find(attention_resolutions.begin(), attention_resolutions.end(), ds) != attention_resolutions.end()) { + std::string name = "input_blocks." + std::to_string(input_block_idx) + ".1"; + h = attention_layer_forward(name, ctx, h, context); // [N, mult*model_channels, h, w] + } + + auto zero_conv = std::dynamic_pointer_cast(blocks["zero_convs." + std::to_string(input_block_idx) + ".0"]); + + outs.push_back(zero_conv->forward(ctx, h)); + } + if (i != len_mults - 1) { + ds *= 2; + input_block_idx += 1; + + std::string name = "input_blocks." + std::to_string(input_block_idx) + ".0"; + auto block = std::dynamic_pointer_cast(blocks[name]); + + h = block->forward(ctx, h); // [N, mult*model_channels, h/(2^(i+1)), w/(2^(i+1))] + + auto zero_conv = std::dynamic_pointer_cast(blocks["zero_convs." 
+ std::to_string(input_block_idx) + ".0"]); + + outs.push_back(zero_conv->forward(ctx, h)); + } + } + // [N, 4*model_channels, h/8, w/8] + + // middle_block + h = resblock_forward("middle_block.0", ctx, h, emb); // [N, 4*model_channels, h/8, w/8] + h = attention_layer_forward("middle_block.1", ctx, h, context); // [N, 4*model_channels, h/8, w/8] + h = resblock_forward("middle_block.2", ctx, h, emb); // [N, 4*model_channels, h/8, w/8] + + // out + outs.push_back(middle_block_out->forward(ctx, h)); + return outs; + } +}; + +struct ControlNet : public GGMLRunner { + SDVersion version = VERSION_SD1; + ControlNetBlock control_net; + + ggml_backend_buffer_t control_buffer = NULL; // keep control output tensors in backend memory + ggml_context* control_ctx = NULL; + std::vector controls; // (12 input block outputs, 1 middle block output) SD 1.5 + struct ggml_tensor* guided_hint = NULL; // guided_hint cache, for faster inference + bool guided_hint_cached = false; + + ControlNet(ggml_backend_t backend, + std::map& tensor_types, + SDVersion version = VERSION_SD1) + : GGMLRunner(backend), control_net(version) { + control_net.init(params_ctx, tensor_types, ""); + } + + ~ControlNet() { + free_control_ctx(); + } + + void alloc_control_ctx(std::vector outs) { + struct ggml_init_params params; + params.mem_size = static_cast(outs.size() * ggml_tensor_overhead()) + 1024 * 1024; + params.mem_buffer = NULL; + params.no_alloc = true; + control_ctx = ggml_init(params); + + controls.resize(outs.size() - 1); + + size_t control_buffer_size = 0; + + guided_hint = ggml_dup_tensor(control_ctx, outs[0]); + control_buffer_size += ggml_nbytes(guided_hint); + + for (int i = 0; i < outs.size() - 1; i++) { + controls[i] = ggml_dup_tensor(control_ctx, outs[i + 1]); + control_buffer_size += ggml_nbytes(controls[i]); + } + + control_buffer = ggml_backend_alloc_ctx_tensors(control_ctx, backend); + + LOG_DEBUG("control buffer size %.2fMB", control_buffer_size * 1.f / 1024.f / 1024.f); + } + + void 
free_control_ctx() { + if (control_buffer != NULL) { + ggml_backend_buffer_free(control_buffer); + control_buffer = NULL; + } + if (control_ctx != NULL) { + ggml_free(control_ctx); + control_ctx = NULL; + } + guided_hint = NULL; + guided_hint_cached = false; + controls.clear(); + } + + std::string get_desc() { + return "control_net"; + } + + void get_param_tensors(std::map& tensors, const std::string prefix) { + control_net.get_param_tensors(tensors, prefix); + } + + struct ggml_cgraph* build_graph(struct ggml_tensor* x, + struct ggml_tensor* hint, + struct ggml_tensor* timesteps, + struct ggml_tensor* context, + struct ggml_tensor* y = NULL) { + struct ggml_cgraph* gf = ggml_new_graph_custom(compute_ctx, CONTROL_NET_GRAPH_SIZE, false); + + x = to_backend(x); + if (guided_hint_cached) { + hint = NULL; + } else { + hint = to_backend(hint); + } + context = to_backend(context); + y = to_backend(y); + timesteps = to_backend(timesteps); + + auto outs = control_net.forward(compute_ctx, + x, + hint, + guided_hint_cached ? 
guided_hint : NULL, + timesteps, + context, + y); + + if (control_ctx == NULL) { + alloc_control_ctx(outs); + } + + ggml_build_forward_expand(gf, ggml_cpy(compute_ctx, outs[0], guided_hint)); + for (int i = 0; i < outs.size() - 1; i++) { + ggml_build_forward_expand(gf, ggml_cpy(compute_ctx, outs[i + 1], controls[i])); + } + + return gf; + } + + void compute(int n_threads, + struct ggml_tensor* x, + struct ggml_tensor* hint, + struct ggml_tensor* timesteps, + struct ggml_tensor* context, + struct ggml_tensor* y, + struct ggml_tensor** output = NULL, + struct ggml_context* output_ctx = NULL) { + // x: [N, in_channels, h, w] + // timesteps: [N, ] + // context: [N, max_position, hidden_size]([N, 77, 768]) or [1, max_position, hidden_size] + // y: [N, adm_in_channels] or [1, adm_in_channels] + auto get_graph = [&]() -> struct ggml_cgraph* { + return build_graph(x, hint, timesteps, context, y); + }; + + GGMLRunner::compute(get_graph, n_threads, false, output, output_ctx); + guided_hint_cached = true; + } + + bool load_from_file(const std::string& file_path) { + LOG_INFO("loading control net from '%s'", file_path.c_str()); + alloc_params_buffer(); + std::map tensors; + control_net.get_param_tensors(tensors); + std::set ignore_tensors; + + ModelLoader model_loader; + if (!model_loader.init_from_file(file_path)) { + LOG_ERROR("init control net model loader from file failed: '%s'", file_path.c_str()); + return false; + } + + bool success = model_loader.load_tensors(tensors, backend, ignore_tensors); + + if (!success) { + LOG_ERROR("load control net tensors from model loader failed"); + return false; + } + + LOG_INFO("control net model loaded"); + return success; + } +}; + +#endif // __CONTROL_HPP__ \ No newline at end of file diff --git a/denoiser.hpp b/denoiser.hpp new file mode 100644 index 000000000..d4bcec590 --- /dev/null +++ b/denoiser.hpp @@ -0,0 +1,1403 @@ +#ifndef __DENOISER_HPP__ +#define __DENOISER_HPP__ + +#include "ggml_extend.hpp" +#include "gits_noise.inl" + 
+/*================================================= CompVisDenoiser ==================================================*/ + +// Ref: https://github.com/crowsonkb/k-diffusion/blob/master/k_diffusion/external.py + +#define TIMESTEPS 1000 +#define FLUX_TIMESTEPS 1000 + +struct SigmaSchedule { + int version = 0; + typedef std::function t_to_sigma_t; + + virtual std::vector get_sigmas(uint32_t n, float sigma_min, float sigma_max, t_to_sigma_t t_to_sigma) = 0; +}; + +struct DiscreteSchedule : SigmaSchedule { + std::vector get_sigmas(uint32_t n, float sigma_min, float sigma_max, t_to_sigma_t t_to_sigma) { + std::vector result; + + int t_max = TIMESTEPS - 1; + + if (n == 0) { + return result; + } else if (n == 1) { + result.push_back(t_to_sigma((float)t_max)); + result.push_back(0); + return result; + } + + float step = static_cast(t_max) / static_cast(n - 1); + for (uint32_t i = 0; i < n; ++i) { + float t = t_max - step * i; + result.push_back(t_to_sigma(t)); + } + result.push_back(0); + return result; + } +}; + +struct ExponentialSchedule : SigmaSchedule { + std::vector get_sigmas(uint32_t n, float sigma_min, float sigma_max, t_to_sigma_t t_to_sigma) { + std::vector sigmas; + + // Calculate step size + float log_sigma_min = std::log(sigma_min); + float log_sigma_max = std::log(sigma_max); + float step = (log_sigma_max - log_sigma_min) / (n - 1); + + // Fill sigmas with exponential values + for (uint32_t i = 0; i < n; ++i) { + float sigma = std::exp(log_sigma_max - step * i); + sigmas.push_back(sigma); + } + + sigmas.push_back(0.0f); + + return sigmas; + } +}; + +/* interp and linear_interp adapted from dpilger26's NumCpp library: + * https://github.com/dpilger26/NumCpp/tree/5e40aab74d14e257d65d3dc385c9ff9e2120c60e */ +constexpr double interp(double left, double right, double perc) noexcept { + return (left * (1. 
- perc)) + (right * perc); +} + +/* This will make the assumption that the reference x and y values are + * already sorted in ascending order because they are being generated as + * such in the calling function */ +std::vector linear_interp(std::vector new_x, + const std::vector ref_x, + const std::vector ref_y) { + const size_t len_x = new_x.size(); + size_t i = 0; + size_t j = 0; + std::vector new_y(len_x); + + if (ref_x.size() != ref_y.size()) { + LOG_ERROR("Linear Interpolation Failed: length mismatch"); + return new_y; + } + + /* Adjusted bounds checking to ensure new_x is within ref_x range */ + if (new_x[0] < ref_x[0]) { + new_x[0] = ref_x[0]; + } + if (new_x.back() > ref_x.back()) { + new_x.back() = ref_x.back(); + } + + while (i < len_x) { + if ((ref_x[j] > new_x[i]) || (new_x[i] > ref_x[j + 1])) { + j++; + continue; + } + + const double perc = static_cast(new_x[i] - ref_x[j]) / static_cast(ref_x[j + 1] - ref_x[j]); + + new_y[i] = interp(ref_y[j], ref_y[j + 1], perc); + i++; + } + + return new_y; +} + +std::vector linear_space(const float start, const float end, const size_t num_points) { + std::vector result(num_points); + const float inc = (end - start) / (static_cast(num_points - 1)); + + if (num_points > 0) { + result[0] = start; + + for (size_t i = 1; i < num_points; i++) { + result[i] = result[i - 1] + inc; + } + } + + return result; +} + +std::vector log_linear_interpolation(std::vector sigma_in, + const size_t new_len) { + const size_t s_len = sigma_in.size(); + std::vector x_vals = linear_space(0.f, 1.f, s_len); + std::vector y_vals(s_len); + + /* Reverses the input array to be ascending instead of descending, + * also hits it with a log, it is log-linear interpolation after all */ + for (size_t i = 0; i < s_len; i++) { + y_vals[i] = std::log(sigma_in[s_len - i - 1]); + } + + std::vector new_x_vals = linear_space(0.f, 1.f, new_len); + std::vector new_y_vals = linear_interp(new_x_vals, x_vals, y_vals); + std::vector results(new_len); + + for 
(size_t i = 0; i < new_len; i++) { + results[i] = static_cast(std::exp(new_y_vals[new_len - i - 1])); + } + + return results; +} + +/* +https://research.nvidia.com/labs/toronto-ai/AlignYourSteps/howto.html +*/ +struct AYSSchedule : SigmaSchedule { + std::vector get_sigmas(uint32_t n, float sigma_min, float sigma_max, t_to_sigma_t t_to_sigma) { + const std::vector noise_levels[] = { + /* SD1.5 */ + {14.6146412293f, 6.4745760956f, 3.8636745985f, 2.6946151520f, + 1.8841921177f, 1.3943805092f, 0.9642583904f, 0.6523686016f, + 0.3977456272f, 0.1515232662f, 0.0291671582f}, + /* SDXL */ + {14.6146412293f, 6.3184485287f, 3.7681790315f, 2.1811480769f, + 1.3405244945f, 0.8620721141f, 0.5550693289f, 0.3798540708f, + 0.2332364134f, 0.1114188177f, 0.0291671582f}, + /* SVD */ + {700.00f, 54.5f, 15.886f, 7.977f, 4.248f, 1.789f, 0.981f, 0.403f, + 0.173f, 0.034f, 0.002f}, + }; + + std::vector inputs; + std::vector results(n + 1); + + if (sd_version_is_sd2((SDVersion)version)) { + LOG_WARN("AYS not designed for SD2.X models"); + } /* fallthrough */ + else if (sd_version_is_sd1((SDVersion)version)) { + LOG_INFO("AYS using SD1.5 noise levels"); + inputs = noise_levels[0]; + } else if (sd_version_is_sdxl((SDVersion)version)) { + LOG_INFO("AYS using SDXL noise levels"); + inputs = noise_levels[1]; + } else if (version == VERSION_SVD) { + LOG_INFO("AYS using SVD noise levels"); + inputs = noise_levels[2]; + } else { + LOG_ERROR("Version not compatible with AYS scheduler"); + return results; + } + + /* Stretches those pre-calculated reference levels out to the desired + * size using log-linear interpolation */ + if ((n + 1) != inputs.size()) { + results = log_linear_interpolation(inputs, n + 1); + } else { + results = inputs; + } + + /* Not sure if this is strictly neccessary */ + results[n] = 0.0f; + + return results; + } +}; + +/* + * GITS Scheduler: https://github.com/zju-pi/diff-sampler/tree/main/gits-main + */ +struct GITSSchedule : SigmaSchedule { + std::vector get_sigmas(uint32_t n, 
float sigma_min, float sigma_max, t_to_sigma_t t_to_sigma) { + if (sigma_max <= 0.0f) { + return std::vector{}; + } + + std::vector sigmas; + + // Assume coeff is provided (replace 1.20 with your dynamic coeff) + float coeff = 1.20f; // Default coefficient + // Normalize coeff to the closest value in the array (0.80 to 1.50) + coeff = std::round(coeff * 20.0f) / 20.0f; // Round to the nearest 0.05 + // Calculate the index based on the coefficient + int index = static_cast((coeff - 0.80f) / 0.05f); + // Ensure the index is within bounds + index = std::max(0, std::min(index, static_cast(GITS_NOISE.size() - 1))); + const std::vector>& selected_noise = *GITS_NOISE[index]; + + if (n <= 20) { + sigmas = (selected_noise)[n - 2]; + } else { + sigmas = log_linear_interpolation(selected_noise.back(), n + 1); + } + + sigmas[n] = 0.0f; + return sigmas; + } +}; + +struct KarrasSchedule : SigmaSchedule { + std::vector get_sigmas(uint32_t n, float sigma_min, float sigma_max, t_to_sigma_t t_to_sigma) { + // These *COULD* be function arguments here, + // but does anybody ever bother to touch them? + float rho = 7.f; + + std::vector result(n + 1); + + float min_inv_rho = pow(sigma_min, (1.f / rho)); + float max_inv_rho = pow(sigma_max, (1.f / rho)); + for (uint32_t i = 0; i < n; i++) { + // Eq. 
(5) from Karras et al 2022 + result[i] = pow(max_inv_rho + (float)i / ((float)n - 1.f) * (min_inv_rho - max_inv_rho), rho); + } + result[n] = 0.; + return result; + } +}; + +struct Denoiser { + std::shared_ptr schedule = std::make_shared(); + virtual float sigma_min() = 0; + virtual float sigma_max() = 0; + virtual float sigma_to_t(float sigma) = 0; + virtual float t_to_sigma(float t) = 0; + virtual std::vector get_scalings(float sigma) = 0; + virtual ggml_tensor* noise_scaling(float sigma, ggml_tensor* noise, ggml_tensor* latent) = 0; + virtual ggml_tensor* inverse_noise_scaling(float sigma, ggml_tensor* latent) = 0; + + virtual std::vector get_sigmas(uint32_t n) { + auto bound_t_to_sigma = std::bind(&Denoiser::t_to_sigma, this, std::placeholders::_1); + return schedule->get_sigmas(n, sigma_min(), sigma_max(), bound_t_to_sigma); + } +}; + +struct CompVisDenoiser : public Denoiser { + float sigmas[TIMESTEPS]; + float log_sigmas[TIMESTEPS]; + + float sigma_data = 1.0f; + + float sigma_min() { + return sigmas[0]; + } + + float sigma_max() { + return sigmas[TIMESTEPS - 1]; + } + + float sigma_to_t(float sigma) { + float log_sigma = std::log(sigma); + std::vector dists; + dists.reserve(TIMESTEPS); + for (float log_sigma_val : log_sigmas) { + dists.push_back(log_sigma - log_sigma_val); + } + + int low_idx = 0; + for (size_t i = 0; i < TIMESTEPS; i++) { + if (dists[i] >= 0) { + low_idx++; + } + } + low_idx = std::min(std::max(low_idx - 1, 0), TIMESTEPS - 2); + int high_idx = low_idx + 1; + + float low = log_sigmas[low_idx]; + float high = log_sigmas[high_idx]; + float w = (low - log_sigma) / (low - high); + w = std::max(0.f, std::min(1.f, w)); + float t = (1.0f - w) * low_idx + w * high_idx; + + return t; + } + + float t_to_sigma(float t) { + int low_idx = static_cast(std::floor(t)); + int high_idx = static_cast(std::ceil(t)); + float w = t - static_cast(low_idx); + float log_sigma = (1.0f - w) * log_sigmas[low_idx] + w * log_sigmas[high_idx]; + return 
std::exp(log_sigma); + } + + std::vector get_scalings(float sigma) { + float c_skip = 1.0f; + float c_out = -sigma; + float c_in = 1.0f / std::sqrt(sigma * sigma + sigma_data * sigma_data); + return {c_skip, c_out, c_in}; + } + + // this function will modify noise/latent + ggml_tensor* noise_scaling(float sigma, ggml_tensor* noise, ggml_tensor* latent) { + ggml_tensor_scale(noise, sigma); + ggml_tensor_add(latent, noise); + return latent; + } + + ggml_tensor* inverse_noise_scaling(float sigma, ggml_tensor* latent) { + return latent; + } +}; + +struct CompVisVDenoiser : public CompVisDenoiser { + std::vector get_scalings(float sigma) { + float c_skip = sigma_data * sigma_data / (sigma * sigma + sigma_data * sigma_data); + float c_out = -sigma * sigma_data / std::sqrt(sigma * sigma + sigma_data * sigma_data); + float c_in = 1.0f / std::sqrt(sigma * sigma + sigma_data * sigma_data); + return {c_skip, c_out, c_in}; + } +}; + +struct EDMVDenoiser : public CompVisVDenoiser { + float min_sigma = 0.002; + float max_sigma = 120.0; + + EDMVDenoiser(float min_sigma = 0.002, float max_sigma = 120.0) + : min_sigma(min_sigma), max_sigma(max_sigma) { + schedule = std::make_shared(); + } + + float t_to_sigma(float t) { + return std::exp(t * 4 / (float)TIMESTEPS); + } + + float sigma_to_t(float s) { + return 0.25 * std::log(s); + } + + float sigma_min() { + return min_sigma; + } + + float sigma_max() { + return max_sigma; + } +}; + +float time_snr_shift(float alpha, float t) { + if (alpha == 1.0f) { + return t; + } + return alpha * t / (1 + (alpha - 1) * t); +} + +struct DiscreteFlowDenoiser : public Denoiser { + float sigmas[TIMESTEPS]; + float shift = 3.0f; + + float sigma_data = 1.0f; + + DiscreteFlowDenoiser() { + set_parameters(); + } + + void set_parameters() { + for (int i = 1; i < TIMESTEPS + 1; i++) { + sigmas[i - 1] = t_to_sigma(i); + } + } + + float sigma_min() { + return sigmas[0]; + } + + float sigma_max() { + return sigmas[TIMESTEPS - 1]; + } + + float 
sigma_to_t(float sigma) { + return sigma * 1000.f; + } + + float t_to_sigma(float t) { + t = t + 1; + return time_snr_shift(shift, t / 1000.f); + } + + std::vector get_scalings(float sigma) { + float c_skip = 1.0f; + float c_out = -sigma; + float c_in = 1.0f; + return {c_skip, c_out, c_in}; + } + + // this function will modify noise/latent + ggml_tensor* noise_scaling(float sigma, ggml_tensor* noise, ggml_tensor* latent) { + ggml_tensor_scale(noise, sigma); + ggml_tensor_scale(latent, 1.0f - sigma); + ggml_tensor_add(latent, noise); + return latent; + } + + ggml_tensor* inverse_noise_scaling(float sigma, ggml_tensor* latent) { + ggml_tensor_scale(latent, 1.0f / (1.0f - sigma)); + return latent; + } +}; + +float flux_time_shift(float mu, float sigma, float t) { + return std::exp(mu) / (std::exp(mu) + std::pow((1.0 / t - 1.0), sigma)); +} + +struct FluxFlowDenoiser : public Denoiser { + float sigmas[TIMESTEPS]; + float shift = 1.15f; + + float sigma_data = 1.0f; + + FluxFlowDenoiser(float shift = 1.15f) { + set_parameters(shift); + } + + void set_parameters(float shift = 1.15f) { + this->shift = shift; + for (int i = 1; i < TIMESTEPS + 1; i++) { + sigmas[i - 1] = t_to_sigma(i / TIMESTEPS * TIMESTEPS); + } + } + + float sigma_min() { + return sigmas[0]; + } + + float sigma_max() { + return sigmas[TIMESTEPS - 1]; + } + + float sigma_to_t(float sigma) { + return sigma; + } + + float t_to_sigma(float t) { + t = t + 1; + return flux_time_shift(shift, 1.0f, t / TIMESTEPS); + } + + std::vector get_scalings(float sigma) { + float c_skip = 1.0f; + float c_out = -sigma; + float c_in = 1.0f; + return {c_skip, c_out, c_in}; + } + + // this function will modify noise/latent + ggml_tensor* noise_scaling(float sigma, ggml_tensor* noise, ggml_tensor* latent) { + ggml_tensor_scale(noise, sigma); + ggml_tensor_scale(latent, 1.0f - sigma); + ggml_tensor_add(latent, noise); + return latent; + } + + ggml_tensor* inverse_noise_scaling(float sigma, ggml_tensor* latent) { + 
ggml_tensor_scale(latent, 1.0f / (1.0f - sigma)); + return latent; + } +}; + +typedef std::function denoise_cb_t; + +// k diffusion reverse ODE: dx = (x - D(x;\sigma)) / \sigma dt; \sigma(t) = t +static void sample_k_diffusion(sample_method_t method, + denoise_cb_t model, + ggml_context* work_ctx, + ggml_tensor* x, + std::vector sigmas, + std::shared_ptr rng, + float eta) { + size_t steps = sigmas.size() - 1; + // sample_euler_ancestral + switch (method) { + case EULER_A: { + struct ggml_tensor* noise = ggml_dup_tensor(work_ctx, x); + struct ggml_tensor* d = ggml_dup_tensor(work_ctx, x); + + for (int i = 0; i < steps; i++) { + float sigma = sigmas[i]; + + // denoise + ggml_tensor* denoised = model(x, sigma, i + 1); + + // d = (x - denoised) / sigma + { + float* vec_d = (float*)d->data; + float* vec_x = (float*)x->data; + float* vec_denoised = (float*)denoised->data; + + for (int i = 0; i < ggml_nelements(d); i++) { + vec_d[i] = (vec_x[i] - vec_denoised[i]) / sigma; + } + } + + // get_ancestral_step + float sigma_up = std::min(sigmas[i + 1], + std::sqrt(sigmas[i + 1] * sigmas[i + 1] * (sigmas[i] * sigmas[i] - sigmas[i + 1] * sigmas[i + 1]) / (sigmas[i] * sigmas[i]))); + float sigma_down = std::sqrt(sigmas[i + 1] * sigmas[i + 1] - sigma_up * sigma_up); + + // Euler method + float dt = sigma_down - sigmas[i]; + // x = x + d * dt + { + float* vec_d = (float*)d->data; + float* vec_x = (float*)x->data; + + for (int i = 0; i < ggml_nelements(x); i++) { + vec_x[i] = vec_x[i] + vec_d[i] * dt; + } + } + + if (sigmas[i + 1] > 0) { + // x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * sigma_up + ggml_tensor_set_f32_randn(noise, rng); + // noise = load_tensor_from_file(work_ctx, "./rand" + std::to_string(i+1) + ".bin"); + { + float* vec_x = (float*)x->data; + float* vec_noise = (float*)noise->data; + + for (int i = 0; i < ggml_nelements(x); i++) { + vec_x[i] = vec_x[i] + vec_noise[i] * sigma_up; + } + } + } + } + } break; + case EULER: // Implemented without any 
sigma churn + { + struct ggml_tensor* d = ggml_dup_tensor(work_ctx, x); + + for (int i = 0; i < steps; i++) { + float sigma = sigmas[i]; + + // denoise + ggml_tensor* denoised = model(x, sigma, i + 1); + + // d = (x - denoised) / sigma + { + float* vec_d = (float*)d->data; + float* vec_x = (float*)x->data; + float* vec_denoised = (float*)denoised->data; + + for (int j = 0; j < ggml_nelements(d); j++) { + vec_d[j] = (vec_x[j] - vec_denoised[j]) / sigma; + } + } + + float dt = sigmas[i + 1] - sigma; + // x = x + d * dt + { + float* vec_d = (float*)d->data; + float* vec_x = (float*)x->data; + + for (int j = 0; j < ggml_nelements(x); j++) { + vec_x[j] = vec_x[j] + vec_d[j] * dt; + } + } + } + } break; + case HEUN: { + struct ggml_tensor* d = ggml_dup_tensor(work_ctx, x); + struct ggml_tensor* x2 = ggml_dup_tensor(work_ctx, x); + + for (int i = 0; i < steps; i++) { + // denoise + ggml_tensor* denoised = model(x, sigmas[i], -(i + 1)); + + // d = (x - denoised) / sigma + { + float* vec_d = (float*)d->data; + float* vec_x = (float*)x->data; + float* vec_denoised = (float*)denoised->data; + + for (int j = 0; j < ggml_nelements(x); j++) { + vec_d[j] = (vec_x[j] - vec_denoised[j]) / sigmas[i]; + } + } + + float dt = sigmas[i + 1] - sigmas[i]; + if (sigmas[i + 1] == 0) { + // Euler step + // x = x + d * dt + float* vec_d = (float*)d->data; + float* vec_x = (float*)x->data; + + for (int j = 0; j < ggml_nelements(x); j++) { + vec_x[j] = vec_x[j] + vec_d[j] * dt; + } + } else { + // Heun step + float* vec_d = (float*)d->data; + float* vec_d2 = (float*)d->data; + float* vec_x = (float*)x->data; + float* vec_x2 = (float*)x2->data; + + for (int j = 0; j < ggml_nelements(x); j++) { + vec_x2[j] = vec_x[j] + vec_d[j] * dt; + } + + ggml_tensor* denoised = model(x2, sigmas[i + 1], i + 1); + float* vec_denoised = (float*)denoised->data; + for (int j = 0; j < ggml_nelements(x); j++) { + float d2 = (vec_x2[j] - vec_denoised[j]) / sigmas[i + 1]; + vec_d[j] = (vec_d[j] + d2) / 2; + vec_x[j] = 
vec_x[j] + vec_d[j] * dt; + } + } + } + } break; + case DPM2: { + struct ggml_tensor* d = ggml_dup_tensor(work_ctx, x); + struct ggml_tensor* x2 = ggml_dup_tensor(work_ctx, x); + + for (int i = 0; i < steps; i++) { + // denoise + ggml_tensor* denoised = model(x, sigmas[i], i + 1); + + // d = (x - denoised) / sigma + { + float* vec_d = (float*)d->data; + float* vec_x = (float*)x->data; + float* vec_denoised = (float*)denoised->data; + + for (int j = 0; j < ggml_nelements(x); j++) { + vec_d[j] = (vec_x[j] - vec_denoised[j]) / sigmas[i]; + } + } + + if (sigmas[i + 1] == 0) { + // Euler step + // x = x + d * dt + float dt = sigmas[i + 1] - sigmas[i]; + float* vec_d = (float*)d->data; + float* vec_x = (float*)x->data; + + for (int j = 0; j < ggml_nelements(x); j++) { + vec_x[j] = vec_x[j] + vec_d[j] * dt; + } + } else { + // DPM-Solver-2 + float sigma_mid = exp(0.5f * (log(sigmas[i]) + log(sigmas[i + 1]))); + float dt_1 = sigma_mid - sigmas[i]; + float dt_2 = sigmas[i + 1] - sigmas[i]; + + float* vec_d = (float*)d->data; + float* vec_x = (float*)x->data; + float* vec_x2 = (float*)x2->data; + for (int j = 0; j < ggml_nelements(x); j++) { + vec_x2[j] = vec_x[j] + vec_d[j] * dt_1; + } + + ggml_tensor* denoised = model(x2, sigma_mid, i + 1); + float* vec_denoised = (float*)denoised->data; + for (int j = 0; j < ggml_nelements(x); j++) { + float d2 = (vec_x2[j] - vec_denoised[j]) / sigma_mid; + vec_x[j] = vec_x[j] + d2 * dt_2; + } + } + } + + } break; + case DPMPP2S_A: { + struct ggml_tensor* noise = ggml_dup_tensor(work_ctx, x); + struct ggml_tensor* d = ggml_dup_tensor(work_ctx, x); + struct ggml_tensor* x2 = ggml_dup_tensor(work_ctx, x); + + for (int i = 0; i < steps; i++) { + // denoise + ggml_tensor* denoised = model(x, sigmas[i], i + 1); + + // get_ancestral_step + float sigma_up = std::min(sigmas[i + 1], + std::sqrt(sigmas[i + 1] * sigmas[i + 1] * (sigmas[i] * sigmas[i] - sigmas[i + 1] * sigmas[i + 1]) / (sigmas[i] * sigmas[i]))); + float sigma_down = 
std::sqrt(sigmas[i + 1] * sigmas[i + 1] - sigma_up * sigma_up); + auto t_fn = [](float sigma) -> float { return -log(sigma); }; + auto sigma_fn = [](float t) -> float { return exp(-t); }; + + if (sigma_down == 0) { + // Euler step + float* vec_d = (float*)d->data; + float* vec_x = (float*)x->data; + float* vec_denoised = (float*)denoised->data; + + for (int j = 0; j < ggml_nelements(d); j++) { + vec_d[j] = (vec_x[j] - vec_denoised[j]) / sigmas[i]; + } + + // TODO: If sigma_down == 0, isn't this wrong? + // But + // https://github.com/crowsonkb/k-diffusion/blob/master/k_diffusion/sampling.py#L525 + // has this exactly the same way. + float dt = sigma_down - sigmas[i]; + for (int j = 0; j < ggml_nelements(d); j++) { + vec_x[j] = vec_x[j] + vec_d[j] * dt; + } + } else { + // DPM-Solver++(2S) + float t = t_fn(sigmas[i]); + float t_next = t_fn(sigma_down); + float h = t_next - t; + float s = t + 0.5f * h; + + float* vec_d = (float*)d->data; + float* vec_x = (float*)x->data; + float* vec_x2 = (float*)x2->data; + float* vec_denoised = (float*)denoised->data; + + // First half-step + for (int j = 0; j < ggml_nelements(x); j++) { + vec_x2[j] = (sigma_fn(s) / sigma_fn(t)) * vec_x[j] - (exp(-h * 0.5f) - 1) * vec_denoised[j]; + } + + ggml_tensor* denoised = model(x2, sigmas[i + 1], i + 1); + + // Second half-step + for (int j = 0; j < ggml_nelements(x); j++) { + vec_x[j] = (sigma_fn(t_next) / sigma_fn(t)) * vec_x[j] - (exp(-h) - 1) * vec_denoised[j]; + } + } + + // Noise addition + if (sigmas[i + 1] > 0) { + ggml_tensor_set_f32_randn(noise, rng); + { + float* vec_x = (float*)x->data; + float* vec_noise = (float*)noise->data; + + for (int i = 0; i < ggml_nelements(x); i++) { + vec_x[i] = vec_x[i] + vec_noise[i] * sigma_up; + } + } + } + } + } break; + case DPMPP2M: // DPM++ (2M) from Karras et al (2022) + { + struct ggml_tensor* old_denoised = ggml_dup_tensor(work_ctx, x); + + auto t_fn = [](float sigma) -> float { return -log(sigma); }; + + for (int i = 0; i < steps; i++) { + 
// denoise + ggml_tensor* denoised = model(x, sigmas[i], i + 1); + + float t = t_fn(sigmas[i]); + float t_next = t_fn(sigmas[i + 1]); + float h = t_next - t; + float a = sigmas[i + 1] / sigmas[i]; + float b = exp(-h) - 1.f; + float* vec_x = (float*)x->data; + float* vec_denoised = (float*)denoised->data; + float* vec_old_denoised = (float*)old_denoised->data; + + if (i == 0 || sigmas[i + 1] == 0) { + // Simpler step for the edge cases + for (int j = 0; j < ggml_nelements(x); j++) { + vec_x[j] = a * vec_x[j] - b * vec_denoised[j]; + } + } else { + float h_last = t - t_fn(sigmas[i - 1]); + float r = h_last / h; + for (int j = 0; j < ggml_nelements(x); j++) { + float denoised_d = (1.f + 1.f / (2.f * r)) * vec_denoised[j] - (1.f / (2.f * r)) * vec_old_denoised[j]; + vec_x[j] = a * vec_x[j] - b * denoised_d; + } + } + + // old_denoised = denoised + for (int j = 0; j < ggml_nelements(x); j++) { + vec_old_denoised[j] = vec_denoised[j]; + } + } + } break; + case DPMPP2Mv2: // Modified DPM++ (2M) from https://github.com/AUTOMATIC1111/stable-diffusion-webui/discussions/8457 + { + struct ggml_tensor* old_denoised = ggml_dup_tensor(work_ctx, x); + + auto t_fn = [](float sigma) -> float { return -log(sigma); }; + + for (int i = 0; i < steps; i++) { + // denoise + ggml_tensor* denoised = model(x, sigmas[i], i + 1); + + float t = t_fn(sigmas[i]); + float t_next = t_fn(sigmas[i + 1]); + float h = t_next - t; + float a = sigmas[i + 1] / sigmas[i]; + float* vec_x = (float*)x->data; + float* vec_denoised = (float*)denoised->data; + float* vec_old_denoised = (float*)old_denoised->data; + + if (i == 0 || sigmas[i + 1] == 0) { + // Simpler step for the edge cases + float b = exp(-h) - 1.f; + for (int j = 0; j < ggml_nelements(x); j++) { + vec_x[j] = a * vec_x[j] - b * vec_denoised[j]; + } + } else { + float h_last = t - t_fn(sigmas[i - 1]); + float h_min = std::min(h_last, h); + float h_max = std::max(h_last, h); + float r = h_max / h_min; + float h_d = (h_max + h_min) / 2.f; + float b 
= exp(-h_d) - 1.f; + for (int j = 0; j < ggml_nelements(x); j++) { + float denoised_d = (1.f + 1.f / (2.f * r)) * vec_denoised[j] - (1.f / (2.f * r)) * vec_old_denoised[j]; + vec_x[j] = a * vec_x[j] - b * denoised_d; + } + } + + // old_denoised = denoised + for (int j = 0; j < ggml_nelements(x); j++) { + vec_old_denoised[j] = vec_denoised[j]; + } + } + } break; + case IPNDM: // iPNDM sampler from https://github.com/zju-pi/diff-sampler/tree/main/diff-solvers-main + { + int max_order = 4; + ggml_tensor* x_next = x; + std::vector buffer_model; + + for (int i = 0; i < steps; i++) { + float sigma = sigmas[i]; + float sigma_next = sigmas[i + 1]; + + ggml_tensor* x_cur = x_next; + float* vec_x_cur = (float*)x_cur->data; + float* vec_x_next = (float*)x_next->data; + + // Denoising step + ggml_tensor* denoised = model(x_cur, sigma, i + 1); + float* vec_denoised = (float*)denoised->data; + // d_cur = (x_cur - denoised) / sigma + struct ggml_tensor* d_cur = ggml_dup_tensor(work_ctx, x_cur); + float* vec_d_cur = (float*)d_cur->data; + + for (int j = 0; j < ggml_nelements(d_cur); j++) { + vec_d_cur[j] = (vec_x_cur[j] - vec_denoised[j]) / sigma; + } + + int order = std::min(max_order, i + 1); + + // Calculate vec_x_next based on the order + switch (order) { + case 1: // First Euler step + for (int j = 0; j < ggml_nelements(x_next); j++) { + vec_x_next[j] = vec_x_cur[j] + (sigma_next - sigma) * vec_d_cur[j]; + } + break; + + case 2: // Use one history point + { + float* vec_d_prev1 = (float*)buffer_model.back()->data; + for (int j = 0; j < ggml_nelements(x_next); j++) { + vec_x_next[j] = vec_x_cur[j] + (sigma_next - sigma) * (3 * vec_d_cur[j] - vec_d_prev1[j]) / 2; + } + } break; + + case 3: // Use two history points + { + float* vec_d_prev1 = (float*)buffer_model.back()->data; + float* vec_d_prev2 = (float*)buffer_model[buffer_model.size() - 2]->data; + for (int j = 0; j < ggml_nelements(x_next); j++) { + vec_x_next[j] = vec_x_cur[j] + (sigma_next - sigma) * (23 * vec_d_cur[j] - 
16 * vec_d_prev1[j] + 5 * vec_d_prev2[j]) / 12; + } + } break; + + case 4: // Use three history points + { + float* vec_d_prev1 = (float*)buffer_model.back()->data; + float* vec_d_prev2 = (float*)buffer_model[buffer_model.size() - 2]->data; + float* vec_d_prev3 = (float*)buffer_model[buffer_model.size() - 3]->data; + for (int j = 0; j < ggml_nelements(x_next); j++) { + vec_x_next[j] = vec_x_cur[j] + (sigma_next - sigma) * (55 * vec_d_cur[j] - 59 * vec_d_prev1[j] + 37 * vec_d_prev2[j] - 9 * vec_d_prev3[j]) / 24; + } + } break; + } + + // Manage buffer_model + if (buffer_model.size() == max_order - 1) { + // Shift elements to the left + for (int k = 0; k < max_order - 2; k++) { + buffer_model[k] = buffer_model[k + 1]; + } + buffer_model.back() = d_cur; // Replace the last element with d_cur + } else { + buffer_model.push_back(d_cur); + } + } + } break; + case IPNDM_V: // iPNDM_v sampler from https://github.com/zju-pi/diff-sampler/tree/main/diff-solvers-main + { + int max_order = 4; + std::vector buffer_model; + ggml_tensor* x_next = x; + + for (int i = 0; i < steps; i++) { + float sigma = sigmas[i]; + float t_next = sigmas[i + 1]; + + // Denoising step + ggml_tensor* denoised = model(x, sigma, i + 1); + float* vec_denoised = (float*)denoised->data; + struct ggml_tensor* d_cur = ggml_dup_tensor(work_ctx, x); + float* vec_d_cur = (float*)d_cur->data; + float* vec_x = (float*)x->data; + + // d_cur = (x - denoised) / sigma + for (int j = 0; j < ggml_nelements(d_cur); j++) { + vec_d_cur[j] = (vec_x[j] - vec_denoised[j]) / sigma; + } + + int order = std::min(max_order, i + 1); + float h_n = t_next - sigma; + float h_n_1 = (i > 0) ? 
(sigma - sigmas[i - 1]) : h_n; + + switch (order) { + case 1: // First Euler step + for (int j = 0; j < ggml_nelements(x_next); j++) { + vec_x[j] += vec_d_cur[j] * h_n; + } + break; + + case 2: { + float* vec_d_prev1 = (float*)buffer_model.back()->data; + for (int j = 0; j < ggml_nelements(x_next); j++) { + vec_x[j] += h_n * ((2 + (h_n / h_n_1)) * vec_d_cur[j] - (h_n / h_n_1) * vec_d_prev1[j]) / 2; + } + break; + } + + case 3: { + float h_n_2 = (i > 1) ? (sigmas[i - 1] - sigmas[i - 2]) : h_n_1; + float* vec_d_prev1 = (float*)buffer_model.back()->data; + float* vec_d_prev2 = (buffer_model.size() > 1) ? (float*)buffer_model[buffer_model.size() - 2]->data : vec_d_prev1; + for (int j = 0; j < ggml_nelements(x_next); j++) { + vec_x[j] += h_n * ((23 * vec_d_cur[j] - 16 * vec_d_prev1[j] + 5 * vec_d_prev2[j]) / 12); + } + break; + } + + case 4: { + float h_n_2 = (i > 1) ? (sigmas[i - 1] - sigmas[i - 2]) : h_n_1; + float h_n_3 = (i > 2) ? (sigmas[i - 2] - sigmas[i - 3]) : h_n_2; + float* vec_d_prev1 = (float*)buffer_model.back()->data; + float* vec_d_prev2 = (buffer_model.size() > 1) ? (float*)buffer_model[buffer_model.size() - 2]->data : vec_d_prev1; + float* vec_d_prev3 = (buffer_model.size() > 2) ? 
(float*)buffer_model[buffer_model.size() - 3]->data : vec_d_prev2; + for (int j = 0; j < ggml_nelements(x_next); j++) { + vec_x[j] += h_n * ((55 * vec_d_cur[j] - 59 * vec_d_prev1[j] + 37 * vec_d_prev2[j] - 9 * vec_d_prev3[j]) / 24); + } + break; + } + } + + // Manage buffer_model + if (buffer_model.size() == max_order - 1) { + buffer_model.erase(buffer_model.begin()); + } + buffer_model.push_back(d_cur); + + // Prepare the next d tensor + d_cur = ggml_dup_tensor(work_ctx, x_next); + } + } break; + case LCM: // Latent Consistency Models + { + struct ggml_tensor* noise = ggml_dup_tensor(work_ctx, x); + struct ggml_tensor* d = ggml_dup_tensor(work_ctx, x); + + for (int i = 0; i < steps; i++) { + float sigma = sigmas[i]; + + // denoise + ggml_tensor* denoised = model(x, sigma, i + 1); + + // x = denoised + { + float* vec_x = (float*)x->data; + float* vec_denoised = (float*)denoised->data; + for (int j = 0; j < ggml_nelements(x); j++) { + vec_x[j] = vec_denoised[j]; + } + } + + if (sigmas[i + 1] > 0) { + // x += sigmas[i + 1] * noise_sampler(sigmas[i], sigmas[i + 1]) + ggml_tensor_set_f32_randn(noise, rng); + // noise = load_tensor_from_file(res_ctx, "./rand" + std::to_string(i+1) + ".bin"); + { + float* vec_x = (float*)x->data; + float* vec_noise = (float*)noise->data; + + for (int j = 0; j < ggml_nelements(x); j++) { + vec_x[j] = vec_x[j] + sigmas[i + 1] * vec_noise[j]; + } + } + } + } + } break; + case DDIM_TRAILING: // Denoising Diffusion Implicit Models + // with the "trailing" timestep spacing + { + // See J. Song et al., "Denoising Diffusion Implicit + // Models", arXiv:2010.02502 [cs.LG] + // + // DDIM itself needs alphas_cumprod (DDPM, J. 
Ho et al., + // arXiv:2006.11239 [cs.LG] with k-diffusion's start and + // end beta) (which unfortunately k-diffusion's data + // structure hides from the denoiser), and the sigmas are + // also needed to invert the behavior of CompVisDenoiser + // (k-diffusion's LMSDiscreteScheduler) + float beta_start = 0.00085f; + float beta_end = 0.0120f; + std::vector alphas_cumprod; + std::vector compvis_sigmas; + + alphas_cumprod.reserve(TIMESTEPS); + compvis_sigmas.reserve(TIMESTEPS); + for (int i = 0; i < TIMESTEPS; i++) { + alphas_cumprod[i] = + (i == 0 ? 1.0f : alphas_cumprod[i - 1]) * + (1.0f - + std::pow(sqrtf(beta_start) + + (sqrtf(beta_end) - sqrtf(beta_start)) * + ((float)i / (TIMESTEPS - 1)), + 2)); + compvis_sigmas[i] = + std::sqrt((1 - alphas_cumprod[i]) / + alphas_cumprod[i]); + } + + struct ggml_tensor* pred_original_sample = + ggml_dup_tensor(work_ctx, x); + struct ggml_tensor* variance_noise = + ggml_dup_tensor(work_ctx, x); + + for (int i = 0; i < steps; i++) { + // The "trailing" DDIM timestep, see S. Lin et al., + // "Common Diffusion Noise Schedules and Sample Steps + // are Flawed", arXiv:2305.08891 [cs], p. 4, Table + // 2. Most variables below follow Diffusers naming + // + // Diffuser naming vs. Song et al. (2010), p. 5, (12) + // and p. 16, (16) ( -> ): + // + // - pred_noise_t -> epsilon_theta^(t)(x_t) + // - pred_original_sample -> f_theta^(t)(x_t) or x_0 + // - std_dev_t -> sigma_t (not the LMS sigma) + // - eta -> eta (set to 0 at the moment) + // - pred_sample_direction -> "direction pointing to + // x_t" + // - pred_prev_sample -> "x_t-1" + int timestep = + roundf(TIMESTEPS - + i * ((float)TIMESTEPS / steps)) - + 1; + // 1. 
get previous step value (=t-1) + int prev_timestep = timestep - TIMESTEPS / steps; + // The sigma here is chosen to cause the + // CompVisDenoiser to produce t = timestep + float sigma = compvis_sigmas[timestep]; + if (i == 0) { + // The function add_noise intializes x to + // Diffusers' latents * sigma (as in Diffusers' + // pipeline) or sample * sigma (Diffusers' + // scheduler), where this sigma = init_noise_sigma + // in Diffusers. For DDPM and DDIM however, + // init_noise_sigma = 1. But the k-diffusion + // model() also evaluates F_theta(c_in(sigma) x; + // ...) instead of the bare U-net F_theta, with + // c_in = 1 / sqrt(sigma^2 + 1), as defined in + // T. Karras et al., "Elucidating the Design Space + // of Diffusion-Based Generative Models", + // arXiv:2206.00364 [cs.CV], p. 3, Table 1. Hence + // the first call has to be prescaled as x <- x / + // (c_in * sigma) with the k-diffusion pipeline + // and CompVisDenoiser. + float* vec_x = (float*)x->data; + for (int j = 0; j < ggml_nelements(x); j++) { + vec_x[j] *= std::sqrt(sigma * sigma + 1) / + sigma; + } + } else { + // For the subsequent steps after the first one, + // at this point x = latents or x = sample, and + // needs to be prescaled with x <- sample / c_in + // to compensate for model() applying the scale + // c_in before the U-net F_theta + float* vec_x = (float*)x->data; + for (int j = 0; j < ggml_nelements(x); j++) { + vec_x[j] *= std::sqrt(sigma * sigma + 1); + } + } + // Note (also noise_pred in Diffuser's pipeline) + // model_output = model() is the D(x, sigma) as + // defined in Karras et al. (2022), p. 3, Table 1 and + // p. 8 (7), compare also p. 38 (226) therein. + struct ggml_tensor* model_output = + model(x, sigma, i + 1); + // Here model_output is still the k-diffusion denoiser + // output, not the U-net output F_theta(c_in(sigma) x; + // ...) in Karras et al. (2022), whereas Diffusers' + // model_output is F_theta(...). 
Recover the actual + // model_output, which is also referred to as the + // "Karras ODE derivative" d or d_cur in several + // samplers above. + { + float* vec_x = (float*)x->data; + float* vec_model_output = + (float*)model_output->data; + for (int j = 0; j < ggml_nelements(x); j++) { + vec_model_output[j] = + (vec_x[j] - vec_model_output[j]) * + (1 / sigma); + } + } + // 2. compute alphas, betas + float alpha_prod_t = alphas_cumprod[timestep]; + // Note final_alpha_cumprod = alphas_cumprod[0] due to + // trailing timestep spacing + float alpha_prod_t_prev = prev_timestep >= 0 ? alphas_cumprod[prev_timestep] : alphas_cumprod[0]; + float beta_prod_t = 1 - alpha_prod_t; + // 3. compute predicted original sample from predicted + // noise also called "predicted x_0" of formula (12) + // from https://arxiv.org/pdf/2010.02502.pdf + { + float* vec_x = (float*)x->data; + float* vec_model_output = + (float*)model_output->data; + float* vec_pred_original_sample = + (float*)pred_original_sample->data; + // Note the substitution of latents or sample = x + // * c_in = x / sqrt(sigma^2 + 1) + for (int j = 0; j < ggml_nelements(x); j++) { + vec_pred_original_sample[j] = + (vec_x[j] / std::sqrt(sigma * sigma + 1) - + std::sqrt(beta_prod_t) * + vec_model_output[j]) * + (1 / std::sqrt(alpha_prod_t)); + } + } + // Assuming the "epsilon" prediction type, where below + // pred_epsilon = model_output is inserted, and is not + // defined/copied explicitly. + // + // 5. compute variance: "sigma_t(eta)" -> see formula + // (16) + // + // sigma_t = sqrt((1 - alpha_t-1)/(1 - alpha_t)) * + // sqrt(1 - alpha_t/alpha_t-1) + float beta_prod_t_prev = 1 - alpha_prod_t_prev; + float variance = (beta_prod_t_prev / beta_prod_t) * + (1 - alpha_prod_t / alpha_prod_t_prev); + float std_dev_t = eta * std::sqrt(variance); + // 6. compute "direction pointing to x_t" of formula + // (12) from https://arxiv.org/pdf/2010.02502.pdf + // 7. 
compute x_t without "random noise" of formula + // (12) from https://arxiv.org/pdf/2010.02502.pdf + { + float* vec_model_output = (float*)model_output->data; + float* vec_pred_original_sample = + (float*)pred_original_sample->data; + float* vec_x = (float*)x->data; + for (int j = 0; j < ggml_nelements(x); j++) { + // Two step inner loop without an explicit + // tensor + float pred_sample_direction = + std::sqrt(1 - alpha_prod_t_prev - + std::pow(std_dev_t, 2)) * + vec_model_output[j]; + vec_x[j] = std::sqrt(alpha_prod_t_prev) * + vec_pred_original_sample[j] + + pred_sample_direction; + } + } + if (eta > 0) { + ggml_tensor_set_f32_randn(variance_noise, rng); + float* vec_variance_noise = + (float*)variance_noise->data; + float* vec_x = (float*)x->data; + for (int j = 0; j < ggml_nelements(x); j++) { + vec_x[j] += std_dev_t * vec_variance_noise[j]; + } + } + // See the note above: x = latents or sample here, and + // is not scaled by the c_in. For the final output + // this is correct, but for subsequent iterations, x + // needs to be prescaled again, since k-diffusion's + // model() differes from the bare U-net F_theta by the + // factor c_in. + } + } break; + case TCD: // Strategic Stochastic Sampling (Algorithm 4) in + // Trajectory Consistency Distillation + { + // See J. Zheng et al., "Trajectory Consistency + // Distillation: Improved Latent Consistency Distillation + // by Semi-Linear Consistency Function with Trajectory + // Mapping", arXiv:2402.19159 [cs.CV] + float beta_start = 0.00085f; + float beta_end = 0.0120f; + std::vector alphas_cumprod; + std::vector compvis_sigmas; + + alphas_cumprod.reserve(TIMESTEPS); + compvis_sigmas.reserve(TIMESTEPS); + for (int i = 0; i < TIMESTEPS; i++) { + alphas_cumprod[i] = + (i == 0 ? 
1.0f : alphas_cumprod[i - 1]) * + (1.0f - + std::pow(sqrtf(beta_start) + + (sqrtf(beta_end) - sqrtf(beta_start)) * + ((float)i / (TIMESTEPS - 1)), + 2)); + compvis_sigmas[i] = + std::sqrt((1 - alphas_cumprod[i]) / + alphas_cumprod[i]); + } + int original_steps = 50; + + struct ggml_tensor* pred_original_sample = + ggml_dup_tensor(work_ctx, x); + struct ggml_tensor* noise = + ggml_dup_tensor(work_ctx, x); + + for (int i = 0; i < steps; i++) { + // Analytic form for TCD timesteps + int timestep = TIMESTEPS - 1 - + (TIMESTEPS / original_steps) * + (int)floor(i * ((float)original_steps / steps)); + // 1. get previous step value + int prev_timestep = i >= steps - 1 ? 0 : TIMESTEPS - 1 - (TIMESTEPS / original_steps) * (int)floor((i + 1) * ((float)original_steps / steps)); + // Here timestep_s is tau_n' in Algorithm 4. The _s + // notation appears to be that from C. Lu, + // "DPM-Solver: A Fast ODE Solver for Diffusion + // Probabilistic Model Sampling in Around 10 Steps", + // arXiv:2206.00927 [cs.LG], but this notation is not + // continued in Algorithm 4, where _n' is used. + int timestep_s = + (int)floor((1 - eta) * prev_timestep); + // Begin k-diffusion specific workaround for + // evaluating F_theta(x; ...) from D(x, sigma), same + // as in DDIM (and see there for detailed comments) + float sigma = compvis_sigmas[timestep]; + if (i == 0) { + float* vec_x = (float*)x->data; + for (int j = 0; j < ggml_nelements(x); j++) { + vec_x[j] *= std::sqrt(sigma * sigma + 1) / + sigma; + } + } else { + float* vec_x = (float*)x->data; + for (int j = 0; j < ggml_nelements(x); j++) { + vec_x[j] *= std::sqrt(sigma * sigma + 1); + } + } + struct ggml_tensor* model_output = + model(x, sigma, i + 1); + { + float* vec_x = (float*)x->data; + float* vec_model_output = + (float*)model_output->data; + for (int j = 0; j < ggml_nelements(x); j++) { + vec_model_output[j] = + (vec_x[j] - vec_model_output[j]) * + (1 / sigma); + } + } + // 2. 
compute alphas, betas + // + // When comparing TCD with DDPM/DDIM note that Zheng + // et al. (2024) follows the DPM-Solver notation for + // alpha. One can find the following comment in the + // original DPM-Solver code + // (https://github.com/LuChengTHU/dpm-solver/): + // "**Important**: Please pay special attention for + // the args for `alphas_cumprod`: The `alphas_cumprod` + // is the \hat{alpha_n} arrays in the notations of + // DDPM. [...] Therefore, the notation \hat{alpha_n} + // is different from the notation alpha_t in + // DPM-Solver. In fact, we have alpha_{t_n} = + // \sqrt{\hat{alpha_n}}, [...]" + float alpha_prod_t = alphas_cumprod[timestep]; + float beta_prod_t = 1 - alpha_prod_t; + // Note final_alpha_cumprod = alphas_cumprod[0] since + // TCD is always "trailing" + float alpha_prod_t_prev = prev_timestep >= 0 ? alphas_cumprod[prev_timestep] : alphas_cumprod[0]; + // The subscript _s are the only portion in this + // section (2) unique to TCD + float alpha_prod_s = alphas_cumprod[timestep_s]; + float beta_prod_s = 1 - alpha_prod_s; + // 3. Compute the predicted noised sample x_s based on + // the model parameterization + // + // This section is also exactly the same as DDIM + { + float* vec_x = (float*)x->data; + float* vec_model_output = + (float*)model_output->data; + float* vec_pred_original_sample = + (float*)pred_original_sample->data; + for (int j = 0; j < ggml_nelements(x); j++) { + vec_pred_original_sample[j] = + (vec_x[j] / std::sqrt(sigma * sigma + 1) - + std::sqrt(beta_prod_t) * + vec_model_output[j]) * + (1 / std::sqrt(alpha_prod_t)); + } + } + // This consistency function step can be difficult to + // decipher from Algorithm 4, as it is simply stated + // using a consistency function. This step is the + // modified DDIM, i.e. p. 8 (32) in Zheng et + // al. (2024), with eta set to 0 (see the paragraph + // immediately thereafter that states this somewhat + // obliquely). 
+ { + float* vec_pred_original_sample = + (float*)pred_original_sample->data; + float* vec_model_output = + (float*)model_output->data; + float* vec_x = (float*)x->data; + for (int j = 0; j < ggml_nelements(x); j++) { + // Substituting x = pred_noised_sample and + // pred_epsilon = model_output + vec_x[j] = + std::sqrt(alpha_prod_s) * + vec_pred_original_sample[j] + + std::sqrt(beta_prod_s) * + vec_model_output[j]; + } + } + // 4. Sample and inject noise z ~ N(0, I) for + // MultiStep Inference Noise is not used on the final + // timestep of the timestep schedule. This also means + // that noise is not used for one-step sampling. Eta + // (referred to as "gamma" in the paper) was + // introduced to control the stochasticity in every + // step. When eta = 0, it represents deterministic + // sampling, whereas eta = 1 indicates full stochastic + // sampling. + if (eta > 0 && i != steps - 1) { + // In this case, x is still pred_noised_sample, + // continue in-place + ggml_tensor_set_f32_randn(noise, rng); + float* vec_x = (float*)x->data; + float* vec_noise = (float*)noise->data; + for (int j = 0; j < ggml_nelements(x); j++) { + // Corresponding to (35) in Zheng et + // al. 
(2024), substituting x = + // pred_noised_sample + vec_x[j] = + std::sqrt(alpha_prod_t_prev / + alpha_prod_s) * + vec_x[j] + + std::sqrt(1 - alpha_prod_t_prev / + alpha_prod_s) * + vec_noise[j]; + } + } + } + } break; + + default: + LOG_ERROR("Attempting to sample with nonexisting sample method %i", method); + abort(); + } +} + +#endif // __DENOISER_HPP__ diff --git a/diffusion_model.hpp b/diffusion_model.hpp new file mode 100644 index 000000000..5c349439d --- /dev/null +++ b/diffusion_model.hpp @@ -0,0 +1,187 @@ +#ifndef __DIFFUSION_MODEL_H__ +#define __DIFFUSION_MODEL_H__ + +#include "flux.hpp" +#include "mmdit.hpp" +#include "unet.hpp" + +struct DiffusionModel { + virtual void compute(int n_threads, + struct ggml_tensor* x, + struct ggml_tensor* timesteps, + struct ggml_tensor* context, + struct ggml_tensor* c_concat, + struct ggml_tensor* y, + struct ggml_tensor* guidance, + std::vector ref_latents = {}, + int num_video_frames = -1, + std::vector controls = {}, + float control_strength = 0.f, + struct ggml_tensor** output = NULL, + struct ggml_context* output_ctx = NULL, + std::vector skip_layers = std::vector()) = 0; + virtual void alloc_params_buffer() = 0; + virtual void free_params_buffer() = 0; + virtual void free_compute_buffer() = 0; + virtual void get_param_tensors(std::map& tensors) = 0; + virtual size_t get_params_buffer_size() = 0; + virtual int64_t get_adm_in_channels() = 0; +}; + +struct UNetModel : public DiffusionModel { + UNetModelRunner unet; + + UNetModel(ggml_backend_t backend, + std::map& tensor_types, + SDVersion version = VERSION_SD1, + bool flash_attn = false) + : unet(backend, tensor_types, "model.diffusion_model", version, flash_attn) { + } + + void alloc_params_buffer() { + unet.alloc_params_buffer(); + } + + void free_params_buffer() { + unet.free_params_buffer(); + } + + void free_compute_buffer() { + unet.free_compute_buffer(); + } + + void get_param_tensors(std::map& tensors) { + unet.get_param_tensors(tensors, 
"model.diffusion_model"); + } + + size_t get_params_buffer_size() { + return unet.get_params_buffer_size(); + } + + int64_t get_adm_in_channels() { + return unet.unet.adm_in_channels; + } + + void compute(int n_threads, + struct ggml_tensor* x, + struct ggml_tensor* timesteps, + struct ggml_tensor* context, + struct ggml_tensor* c_concat, + struct ggml_tensor* y, + struct ggml_tensor* guidance, + std::vector ref_latents = {}, + int num_video_frames = -1, + std::vector controls = {}, + float control_strength = 0.f, + struct ggml_tensor** output = NULL, + struct ggml_context* output_ctx = NULL, + std::vector skip_layers = std::vector()) { + (void)skip_layers; // SLG doesn't work with UNet models + return unet.compute(n_threads, x, timesteps, context, c_concat, y, num_video_frames, controls, control_strength, output, output_ctx); + } +}; + +struct MMDiTModel : public DiffusionModel { + MMDiTRunner mmdit; + + MMDiTModel(ggml_backend_t backend, + std::map& tensor_types) + : mmdit(backend, tensor_types, "model.diffusion_model") { + } + + void alloc_params_buffer() { + mmdit.alloc_params_buffer(); + } + + void free_params_buffer() { + mmdit.free_params_buffer(); + } + + void free_compute_buffer() { + mmdit.free_compute_buffer(); + } + + void get_param_tensors(std::map& tensors) { + mmdit.get_param_tensors(tensors, "model.diffusion_model"); + } + + size_t get_params_buffer_size() { + return mmdit.get_params_buffer_size(); + } + + int64_t get_adm_in_channels() { + return 768 + 1280; + } + + void compute(int n_threads, + struct ggml_tensor* x, + struct ggml_tensor* timesteps, + struct ggml_tensor* context, + struct ggml_tensor* c_concat, + struct ggml_tensor* y, + struct ggml_tensor* guidance, + std::vector ref_latents = {}, + int num_video_frames = -1, + std::vector controls = {}, + float control_strength = 0.f, + struct ggml_tensor** output = NULL, + struct ggml_context* output_ctx = NULL, + std::vector skip_layers = std::vector()) { + return mmdit.compute(n_threads, x, 
timesteps, context, y, output, output_ctx, skip_layers); + } +}; + +struct FluxModel : public DiffusionModel { + Flux::FluxRunner flux; + + FluxModel(ggml_backend_t backend, + std::map& tensor_types, + SDVersion version = VERSION_FLUX, + bool flash_attn = false, + bool use_mask = false) + : flux(backend, tensor_types, "model.diffusion_model", version, flash_attn, use_mask) { + } + + void alloc_params_buffer() { + flux.alloc_params_buffer(); + } + + void free_params_buffer() { + flux.free_params_buffer(); + } + + void free_compute_buffer() { + flux.free_compute_buffer(); + } + + void get_param_tensors(std::map& tensors) { + flux.get_param_tensors(tensors, "model.diffusion_model"); + } + + size_t get_params_buffer_size() { + return flux.get_params_buffer_size(); + } + + int64_t get_adm_in_channels() { + return 768; + } + + void compute(int n_threads, + struct ggml_tensor* x, + struct ggml_tensor* timesteps, + struct ggml_tensor* context, + struct ggml_tensor* c_concat, + struct ggml_tensor* y, + struct ggml_tensor* guidance, + std::vector ref_latents = {}, + int num_video_frames = -1, + std::vector controls = {}, + float control_strength = 0.f, + struct ggml_tensor** output = NULL, + struct ggml_context* output_ctx = NULL, + std::vector skip_layers = std::vector()) { + return flux.compute(n_threads, x, timesteps, context, c_concat, y, guidance, ref_latents, output, output_ctx, skip_layers); + } +}; + +#endif diff --git a/docs/chroma.md b/docs/chroma.md new file mode 100644 index 000000000..d013a43c8 --- /dev/null +++ b/docs/chroma.md @@ -0,0 +1,33 @@ +# How to Use + +You can run Chroma using stable-diffusion.cpp with a GPU that has 6GB or even 4GB of VRAM, without needing to offload to RAM. 
+ +## Download weights + +- Download Chroma + - If you don't want to do the conversion yourself, download the preconverted gguf model from [silveroxides/Chroma-GGUF](https://huggingface.co/silveroxides/Chroma-GGUF) + - Otherwise, download chroma's safetensors from [lodestones/Chroma](https://huggingface.co/lodestones/Chroma) +- Download vae from https://huggingface.co/black-forest-labs/FLUX.1-dev/blob/main/ae.safetensors +- Download t5xxl from https://huggingface.co/comfyanonymous/flux_text_encoders/blob/main/t5xxl_fp16.safetensors + +## Convert Chroma weights + +You can download the preconverted gguf weights from [silveroxides/Chroma-GGUF](https://huggingface.co/silveroxides/Chroma-GGUF), this way you don't have to do the conversion yourself. + +``` +.\bin\Release\sd.exe -M convert -m ..\..\ComfyUI\models\unet\chroma-unlocked-v40.safetensors -o ..\models\chroma-unlocked-v40-q8_0.gguf -v --type q8_0 +``` + +## Run + +### Example +For example: + +``` + .\bin\Release\sd.exe --diffusion-model ..\models\chroma-unlocked-v40-q8_0.gguf --vae ..\models\ae.sft --t5xxl ..\models\t5xxl_fp16.safetensors -p "a lovely cat holding a sign says 'chroma.cpp'" --cfg-scale 4.0 --sampling-method euler -v --chroma-disable-dit-mask +``` + +![](../assets/flux/chroma_v40.png) + + + diff --git a/docs/docker.md b/docs/docker.md new file mode 100644 index 000000000..96e9838de --- /dev/null +++ b/docs/docker.md @@ -0,0 +1,15 @@ +## Docker + +### Building using Docker + +```shell +docker build -t sd . +``` + +### Run + +```shell +docker run -v /path/to/models:/models -v /path/to/output/:/output sd [args...] 
+# For example +# docker run -v ./models:/models -v ./build:/output sd -m /models/sd-v1-4.ckpt -p "a lovely cat" -v -o /output/output.png +``` \ No newline at end of file diff --git a/docs/esrgan.md b/docs/esrgan.md new file mode 100644 index 000000000..21f2af4ae --- /dev/null +++ b/docs/esrgan.md @@ -0,0 +1,9 @@ +## Using ESRGAN to upscale results + +You can use ESRGAN to upscale the generated images. At the moment, only the [RealESRGAN_x4plus_anime_6B.pth](https://github.com/xinntao/Real-ESRGAN/releases/download/v0.2.2.4/RealESRGAN_x4plus_anime_6B.pth) model is supported. Support for more models of this architecture will be added soon. + +- Specify the model path using the `--upscale-model PATH` parameter. example: + +```bash +sd -m ../models/v1-5-pruned-emaonly.safetensors -p "a lovely cat" --upscale-model ../models/RealESRGAN_x4plus_anime_6B.pth +``` diff --git a/docs/flux.md b/docs/flux.md new file mode 100644 index 000000000..dafad9b0b --- /dev/null +++ b/docs/flux.md @@ -0,0 +1,66 @@ +# How to Use + +You can run Flux using stable-diffusion.cpp with a GPU that has 6GB or even 4GB of VRAM, without needing to offload to RAM. 
+ +## Download weights + +- Download flux + - If you don't want to do the conversion yourself, download the preconverted gguf model from [FLUX.1-dev-gguf](https://huggingface.co/leejet/FLUX.1-dev-gguf) or [FLUX.1-schnell](https://huggingface.co/leejet/FLUX.1-schnell-gguf) + - Otherwise, download flux-dev from https://huggingface.co/black-forest-labs/FLUX.1-dev/blob/main/flux1-dev.safetensors or flux-schnell from https://huggingface.co/black-forest-labs/FLUX.1-schnell/blob/main/flux1-schnell.safetensors +- Download vae from https://huggingface.co/black-forest-labs/FLUX.1-dev/blob/main/ae.safetensors +- Download clip_l from https://huggingface.co/comfyanonymous/flux_text_encoders/blob/main/clip_l.safetensors +- Download t5xxl from https://huggingface.co/comfyanonymous/flux_text_encoders/blob/main/t5xxl_fp16.safetensors + +## Convert flux weights + +You can download the preconverted gguf weights from [FLUX.1-dev-gguf](https://huggingface.co/leejet/FLUX.1-dev-gguf) or [FLUX.1-schnell](https://huggingface.co/leejet/FLUX.1-schnell-gguf), this way you don't have to do the conversion yourself. + +Using fp16 will lead to overflow, but ggml's support for bf16 is not yet fully developed. Therefore, we need to convert flux to gguf format here, which also saves VRAM. For example: +``` +.\bin\Release\sd.exe -M convert -m ..\..\ComfyUI\models\unet\flux1-dev.sft -o ..\models\flux1-dev-q8_0.gguf -v --type q8_0 +``` + +## Run + +- `--cfg-scale` is recommended to be set to 1. + +### Flux-dev +For example: + +``` + .\bin\Release\sd.exe --diffusion-model ..\models\flux1-dev-q8_0.gguf --vae ..\models\ae.sft --clip_l ..\models\clip_l.safetensors --t5xxl ..\models\t5xxl_fp16.safetensors -p "a lovely cat holding a sign says 'flux.cpp'" --cfg-scale 1.0 --sampling-method euler -v +``` + +Using formats of different precisions will yield results of varying quality. 
+ +| Type | q8_0 | q4_0 | q4_k | q3_k | q2_k | +|---- | ---- |---- |---- |---- |---- | +| **Memory** | 12068.09 MB | 6394.53 MB | 6395.17 MB | 4888.16 MB | 3735.73 MB | +| **Result** | ![](../assets/flux/flux1-dev-q8_0.png) |![](../assets/flux/flux1-dev-q4_0.png) |![](../assets/flux/flux1-dev-q4_k.png) |![](../assets/flux/flux1-dev-q3_k.png) |![](../assets/flux/flux1-dev-q2_k.png)| + + + +### Flux-schnell + + +``` + .\bin\Release\sd.exe --diffusion-model ..\models\flux1-schnell-q8_0.gguf --vae ..\models\ae.sft --clip_l ..\models\clip_l.safetensors --t5xxl ..\models\t5xxl_fp16.safetensors -p "a lovely cat holding a sign says 'flux.cpp'" --cfg-scale 1.0 --sampling-method euler -v --steps 4 +``` + +| q8_0 | +| ---- | +|![](../assets/flux/flux1-schnell-q8_0.png) | + +## Run with LoRA + +Since many flux LoRA training libraries have used various LoRA naming formats, it is possible that not all flux LoRA naming formats are supported. It is recommended to use LoRA with naming formats compatible with ComfyUI. + +### Flux-dev q8_0 with LoRA + +- LoRA model from https://huggingface.co/XLabs-AI/flux-lora-collection/tree/main (using comfy converted version!!!) + +``` +.\bin\Release\sd.exe --diffusion-model ..\models\flux1-dev-q8_0.gguf --vae ...\models\ae.sft --clip_l ..\models\clip_l.safetensors --t5xxl ..\models\t5xxl_fp16.safetensors -p "a lovely cat holding a sign says 'flux.cpp'" --cfg-scale 1.0 --sampling-method euler -v --lora-model-dir ../models +``` + +![output](../assets/flux/flux1-dev-q8_0%20with%20lora.png) diff --git a/docs/hipBLAS_on_Windows.md b/docs/hipBLAS_on_Windows.md new file mode 100644 index 000000000..cff0aacc7 --- /dev/null +++ b/docs/hipBLAS_on_Windows.md @@ -0,0 +1,85 @@ +# Using hipBLAS on Windows + +To get hipBLAS in `stable-diffusion.cpp` working on Windows, go through this guide section by section. + +## Build Tools for Visual Studio 2022 + +Skip this step if you already have Build Tools installed. 
+ +To install Build Tools, go to [Visual Studio Downloads](https://visualstudio.microsoft.com/vs/), download `Visual Studio 2022 and other Products` and run the installer. + +## CMake + +Skip this step if you already have CMake installed: running `cmake --version` should output `cmake version x.y.z`. + +Download latest `Windows x64 Installer` from [Download | CMake](https://cmake.org/download/) and run it. + +## ROCm + +Skip this step if you already have ROCm installed. + +The [validation tools](https://rocm.docs.amd.com/en/latest/reference/validation_tools.html) are not supported on Windows, so you should confirm the `ROCM` version yourself. + +Fortunately, `AMD` provides complete help documentation; you can use it to install [ROCM](https://rocm.docs.amd.com/en/latest/deploy/windows/quick_start.html) + +>**If you encounter [AMD ROCm Windows Installation Error 215](https://github.com/RadeonOpenCompute/ROCm/issues/2363), don't worry about this error. ROCM has been installed correctly, but the vs studio plugin installation failed, we can ignore it.** + +Then we must set `ROCM` as environment variables before running cmake. + +Usually if you install according to the official tutorial and do not modify the ROCM path, then there is a high probability that it is here `C:\Program Files\AMD\ROCm\5.5\bin` + +This is what I use to set the clang: +```Commandline +set CC=C:\Program Files\AMD\ROCm\5.5\bin\clang.exe +set CXX=C:\Program Files\AMD\ROCm\5.5\bin\clang++.exe +``` + +## Ninja + +Skip this step if you already have Ninja installed: running `ninja --version` should output `1.11.1`. + +Download latest `ninja-win.zip` from [GitHub Releases Page](https://github.com/ninja-build/ninja/releases/tag/v1.11.1) and unzip. Then set as environment variables. 
I unzipped it in `C:\Program Files\ninja`, so I set it like this: + +```Commandline +set ninja=C:\Program Files\ninja\ninja.exe +``` +## Building stable-diffusion.cpp + +The thing different from the regular CPU build is `-DSD_HIPBLAS=ON` , +`-G "Ninja"`, `-DCMAKE_C_COMPILER=clang`, `-DCMAKE_CXX_COMPILER=clang++`, `-DAMDGPU_TARGETS=gfx1100` + +>**Notice**: check the `clang` and `clang++` information: +```Commandline +clang --version +clang++ --version +``` + +If you see like this, we can continue: +``` +clang version 17.0.0 (git@github.amd.com:Compute-Mirrors/llvm-project e3201662d21c48894f2156d302276eb1cf47c7be) +Target: x86_64-pc-windows-msvc +Thread model: posix +InstalledDir: C:\Program Files\AMD\ROCm\5.5\bin +``` + +``` +clang version 17.0.0 (git@github.amd.com:Compute-Mirrors/llvm-project e3201662d21c48894f2156d302276eb1cf47c7be) +Target: x86_64-pc-windows-msvc +Thread model: posix +InstalledDir: C:\Program Files\AMD\ROCm\5.5\bin +``` + +>**Notice** that the `gfx1100` is the GPU architecture of my GPU, you can change it to your GPU architecture. Click here to see your architecture [LLVM Target](https://rocm.docs.amd.com/en/latest/release/windows_support.html#windows-supported-gpus) + +My GPU is AMD Radeonā„¢ RX 7900 XTX Graphics, so I set it to `gfx1100`. + +option: + +```commandline +mkdir build +cd build +cmake .. -G "Ninja" -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ -DSD_HIPBLAS=ON -DCMAKE_BUILD_TYPE=Release -DAMDGPU_TARGETS=gfx1100 +cmake --build . --config Release +``` + +If everything went OK, `build\bin\sd.exe` file should appear. diff --git a/docs/kontext.md b/docs/kontext.md new file mode 100644 index 000000000..519752553 --- /dev/null +++ b/docs/kontext.md @@ -0,0 +1,39 @@ +# How to Use + +You can run Kontext using stable-diffusion.cpp with a GPU that has 6GB or even 4GB of VRAM, without needing to offload to RAM. 
+ +## Download weights + +- Download Kontext + - If you don't want to do the conversion yourself, download the preconverted gguf model from [FLUX.1-Kontext-dev-GGUF](https://huggingface.co/QuantStack/FLUX.1-Kontext-dev-GGUF) + - Otherwise, download FLUX.1-Kontext-dev from https://huggingface.co/black-forest-labs/FLUX.1-Kontext-dev/blob/main/flux1-kontext-dev.safetensors +- Download vae from https://huggingface.co/black-forest-labs/FLUX.1-dev/blob/main/ae.safetensors +- Download clip_l from https://huggingface.co/comfyanonymous/flux_text_encoders/blob/main/clip_l.safetensors +- Download t5xxl from https://huggingface.co/comfyanonymous/flux_text_encoders/blob/main/t5xxl_fp16.safetensors + +## Convert Kontext weights + +You can download the preconverted gguf weights from [FLUX.1-Kontext-dev-GGUF](https://huggingface.co/QuantStack/FLUX.1-Kontext-dev-GGUF), this way you don't have to do the conversion yourself. + +``` +.\bin\Release\sd.exe -M convert -m ..\..\ComfyUI\models\unet\flux1-kontext-dev.safetensors -o ..\models\flux1-kontext-dev-q8_0.gguf -v --type q8_0 +``` + +## Run + +- `--cfg-scale` is recommended to be set to 1. 
+ +### Example +For example: + +``` + .\bin\Release\sd.exe -M edit -r .\flux1-dev-q8_0.png --diffusion-model ..\models\flux1-kontext-dev-q8_0.gguf --vae ..\models\ae.sft --clip_l ..\models\clip_l.safetensors --t5xxl ..\models\t5xxl_fp16.safetensors -p "change 'flux.cpp' to 'kontext.cpp'" --cfg-scale 1.0 --sampling-method euler -v +``` + + +| ref_image | prompt | output | +| ---- | ---- |---- | +| ![](../assets/flux/flux1-dev-q8_0.png) | change 'flux.cpp' to 'kontext.cpp' |![](../assets/flux/kontext1_dev_output.png) | + + + diff --git a/docs/lcm.md b/docs/lcm.md new file mode 100644 index 000000000..14a363406 --- /dev/null +++ b/docs/lcm.md @@ -0,0 +1,15 @@ +## LCM/LCM-LoRA + +- Download LCM-LoRA form https://huggingface.co/latent-consistency/lcm-lora-sdv1-5 +- Specify LCM-LoRA by adding `` to prompt +- It's advisable to set `--cfg-scale` to `1.0` instead of the default `7.0`. For `--steps`, a range of `2-8` steps is recommended. For `--sampling-method`, `lcm`/`euler_a` is recommended. + +Here's a simple example: + +``` +./bin/sd -m ../models/v1-5-pruned-emaonly.safetensors -p "a lovely cat" --steps 4 --lora-model-dir ../models -v --cfg-scale 1 +``` + +| without LCM-LoRA (--cfg-scale 7) | with LCM-LoRA (--cfg-scale 1) | +| ---- |---- | +| ![](../assets/without_lcm.png) |![](../assets/with_lcm.png) | \ No newline at end of file diff --git a/docs/lora.md b/docs/lora.md new file mode 100644 index 000000000..fb76f287c --- /dev/null +++ b/docs/lora.md @@ -0,0 +1,13 @@ +## LoRA + +- You can specify the directory where the lora weights are stored via `--lora-model-dir`. If not specified, the default is the current working directory. + +- LoRA is specified via prompt, just like [stable-diffusion-webui](https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Features#lora). 
+ +Here's a simple example: + +``` +./bin/sd -m ../models/v1-5-pruned-emaonly.safetensors -p "a lovely cat" --lora-model-dir ../models +``` + +`../models/marblesh.safetensors` or `../models/marblesh.ckpt` will be applied to the model \ No newline at end of file diff --git a/docs/photo_maker.md b/docs/photo_maker.md new file mode 100644 index 000000000..8305a33bd --- /dev/null +++ b/docs/photo_maker.md @@ -0,0 +1,54 @@ +## Using PhotoMaker to personalize image generation + +You can use [PhotoMaker](https://github.com/TencentARC/PhotoMaker) to personalize generated images with your own ID. + +**NOTE**, currently PhotoMaker **ONLY** works with **SDXL** (any SDXL model files will work). + +Download PhotoMaker model file (in safetensor format) [here](https://huggingface.co/bssrdf/PhotoMaker). The official release of the model file (in .bin format) does not work with ```stablediffusion.cpp```. + +- Specify the PhotoMaker model path using the `--stacked-id-embd-dir PATH` parameter. +- Specify the input images path using the `--input-id-images-dir PATH` parameter. + - input images **must** have the same width and height for preprocessing (to be improved) + +In prompt, make sure you have a class word followed by the trigger word ```"img"``` (hard-coded for now). The class word could be one of ```"man, woman, girl, boy"```. If input ID images contain asian faces, add ```Asian``` before the class +word. + +Another PhotoMaker specific parameter: + +- ```--style-ratio (0-100)%```: default is 20 and 10-20 typically gets good results. Lower ratio means more faithfully following input ID (not necessarily better quality). + +Other parameters recommended for running Photomaker: + +- ```--cfg-scale 5.0``` +- ```-H 1024``` +- ```-W 1024``` + +If on low memory GPUs (<= 8GB), recommend running with ```--vae-on-cpu``` option to get artifact free images. 
+ +Example: + +```bash +bin/sd -m ../models/sdxlUnstableDiffusers_v11.safetensors --vae ../models/sdxl_vae.safetensors --stacked-id-embd-dir ../models/photomaker-v1.safetensors --input-id-images-dir ../assets/photomaker_examples/scarletthead_woman -p "a girl img, retro futurism, retro game art style but extremely beautiful, intricate details, masterpiece, best quality, space-themed, cosmic, celestial, stars, galaxies, nebulas, planets, science fiction, highly detailed" -n "realistic, photo-realistic, worst quality, greyscale, bad anatomy, bad hands, error, text" --cfg-scale 5.0 --sampling-method euler -H 1024 -W 1024 --style-ratio 10 --vae-on-cpu -o output.png +``` + +## PhotoMaker Version 2 + +[PhotoMaker Version 2 (PMV2)](https://github.com/TencentARC/PhotoMaker/blob/main/README_pmv2.md) has some key improvements. Unfortunately it has a very heavy dependency which makes running it a bit involved in ```SD.cpp```. + +Running PMV2 is now a two-step process: + +- Run a python script ```face_detect.py``` to obtain **id_embeds** for the given input images +``` +python face_detect.py input_image_dir +``` +An ```id_embeds.safetensors``` file will be generated in ```input_images_dir``` + +**Note: this step is only needed to run once; the same ```id_embeds``` can be reused** + +- Run the same command as in version 1 but replacing ```photomaker-v1.safetensors``` with ```photomaker-v2.safetensors```. + + You can download ```photomaker-v2.safetensors``` from [here](https://huggingface.co/bssrdf/PhotoMakerV2) + +- All the command line parameters from Version 1 remain the same for Version 2 + + diff --git a/docs/quantization_and_gguf.md b/docs/quantization_and_gguf.md new file mode 100644 index 000000000..4f131555d --- /dev/null +++ b/docs/quantization_and_gguf.md @@ -0,0 +1,27 @@ +## Quantization + +You can specify the model weight type using the `--type` parameter. The weights are automatically converted when loading the model. 
+ +- `f16` for 16-bit floating-point +- `f32` for 32-bit floating-point +- `q8_0` for 8-bit integer quantization +- `q5_0` or `q5_1` for 5-bit integer quantization +- `q4_0` or `q4_1` for 4-bit integer quantization + + +### Memory Requirements of Stable Diffusion 1.x + +| precision | f32 | f16 |q8_0 |q5_0 |q5_1 |q4_0 |q4_1 | +| ---- | ---- |---- |---- |---- |---- |---- |---- | +| **Memory** (txt2img - 512 x 512) | ~2.8G | ~2.3G | ~2.1G | ~2.0G | ~2.0G | ~2.0G | ~2.0G | +| **Memory** (txt2img - 512 x 512) *with Flash Attention* | ~2.4G | ~1.9G | ~1.6G | ~1.5G | ~1.5G | ~1.5G | ~1.5G | + +## Convert to GGUF + +You can also convert weights in the formats `ckpt/safetensors/diffusers` to gguf and perform quantization in advance, avoiding the need for quantization every time you load them. + +For example: + +```sh +./bin/sd -M convert -m ../models/v1-5-pruned-emaonly.safetensors -o ../models/v1-5-pruned-emaonly.q8_0.gguf -v --type q8_0 +``` \ No newline at end of file diff --git a/docs/sd3.md b/docs/sd3.md new file mode 100644 index 000000000..777511d4b --- /dev/null +++ b/docs/sd3.md @@ -0,0 +1,20 @@ +# How to Use + +## Download weights + +- Download sd3.5_large from https://huggingface.co/stabilityai/stable-diffusion-3.5-large/blob/main/sd3.5_large.safetensors +- Download clip_g from https://huggingface.co/Comfy-Org/stable-diffusion-3.5-fp8/blob/main/text_encoders/clip_g.safetensors +- Download clip_l from https://huggingface.co/Comfy-Org/stable-diffusion-3.5-fp8/blob/main/text_encoders/clip_l.safetensors +- Download t5xxl from https://huggingface.co/Comfy-Org/stable-diffusion-3.5-fp8/blob/main/text_encoders/t5xxl_fp16.safetensors + + +## Run + +### SD3.5 Large +For example: + +``` +.\bin\Release\sd.exe -m ..\models\sd3.5_large.safetensors --clip_l ..\models\clip_l.safetensors --clip_g ..\models\clip_g.safetensors --t5xxl ..\models\t5xxl_fp16.safetensors -H 1024 -W 1024 -p 'a lovely cat holding a sign says \"Stable diffusion 3.5 Large\"' --cfg-scale 4.5 
--sampling-method euler -v +``` + +![](../assets/sd3.5_large.png) \ No newline at end of file diff --git a/docs/taesd.md b/docs/taesd.md new file mode 100644 index 000000000..3c7e6c1f4 --- /dev/null +++ b/docs/taesd.md @@ -0,0 +1,17 @@ +## Using TAESD to faster decoding + +You can use TAESD to accelerate the decoding of latent images by following these steps: + +- Download the model [weights](https://huggingface.co/madebyollin/taesd/blob/main/diffusion_pytorch_model.safetensors). + +Or curl + +```bash +curl -L -O https://huggingface.co/madebyollin/taesd/blob/main/diffusion_pytorch_model.safetensors +``` + +- Specify the model path using the `--taesd PATH` parameter. example: + +```bash +sd -m ../models/v1-5-pruned-emaonly.safetensors -p "a lovely cat" --taesd ../models/diffusion_pytorch_model.safetensors +``` \ No newline at end of file diff --git a/esrgan.hpp b/esrgan.hpp new file mode 100644 index 000000000..5cbb4ad8f --- /dev/null +++ b/esrgan.hpp @@ -0,0 +1,197 @@ +#ifndef __ESRGAN_HPP__ +#define __ESRGAN_HPP__ + +#include "ggml_extend.hpp" +#include "model.h" + +/* + =================================== ESRGAN =================================== + References: + https://github.com/xinntao/Real-ESRGAN/blob/master/inference_realesrgan.py + https://github.com/XPixelGroup/BasicSR/blob/v1.4.2/basicsr/archs/rrdbnet_arch.py + +*/ + +class ResidualDenseBlock : public GGMLBlock { +protected: + int num_feat; + int num_grow_ch; + +public: + ResidualDenseBlock(int num_feat = 64, int num_grow_ch = 32) + : num_feat(num_feat), num_grow_ch(num_grow_ch) { + blocks["conv1"] = std::shared_ptr(new Conv2d(num_feat, num_grow_ch, {3, 3}, {1, 1}, {1, 1})); + blocks["conv2"] = std::shared_ptr(new Conv2d(num_feat + num_grow_ch, num_grow_ch, {3, 3}, {1, 1}, {1, 1})); + blocks["conv3"] = std::shared_ptr(new Conv2d(num_feat + 2 * num_grow_ch, num_grow_ch, {3, 3}, {1, 1}, {1, 1})); + blocks["conv4"] = std::shared_ptr(new Conv2d(num_feat + 3 * num_grow_ch, num_grow_ch, {3, 3}, {1, 1}, {1, 
1})); + blocks["conv5"] = std::shared_ptr(new Conv2d(num_feat + 4 * num_grow_ch, num_feat, {3, 3}, {1, 1}, {1, 1})); + } + + struct ggml_tensor* lrelu(struct ggml_context* ctx, struct ggml_tensor* x) { + return ggml_leaky_relu(ctx, x, 0.2f, true); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [n, num_feat, h, w] + // return: [n, num_feat, h, w] + + auto conv1 = std::dynamic_pointer_cast(blocks["conv1"]); + auto conv2 = std::dynamic_pointer_cast(blocks["conv2"]); + auto conv3 = std::dynamic_pointer_cast(blocks["conv3"]); + auto conv4 = std::dynamic_pointer_cast(blocks["conv4"]); + auto conv5 = std::dynamic_pointer_cast(blocks["conv5"]); + + auto x1 = lrelu(ctx, conv1->forward(ctx, x)); + auto x_cat = ggml_concat(ctx, x, x1, 2); + auto x2 = lrelu(ctx, conv2->forward(ctx, x_cat)); + x_cat = ggml_concat(ctx, x_cat, x2, 2); + auto x3 = lrelu(ctx, conv3->forward(ctx, x_cat)); + x_cat = ggml_concat(ctx, x_cat, x3, 2); + auto x4 = lrelu(ctx, conv4->forward(ctx, x_cat)); + x_cat = ggml_concat(ctx, x_cat, x4, 2); + auto x5 = conv5->forward(ctx, x_cat); + + x5 = ggml_add(ctx, ggml_scale(ctx, x5, 0.2f), x); + return x5; + } +}; + +class RRDB : public GGMLBlock { +public: + RRDB(int num_feat, int num_grow_ch = 32) { + blocks["rdb1"] = std::shared_ptr(new ResidualDenseBlock(num_feat, num_grow_ch)); + blocks["rdb2"] = std::shared_ptr(new ResidualDenseBlock(num_feat, num_grow_ch)); + blocks["rdb3"] = std::shared_ptr(new ResidualDenseBlock(num_feat, num_grow_ch)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [n, num_feat, h, w] + // return: [n, num_feat, h, w] + + auto rdb1 = std::dynamic_pointer_cast(blocks["rdb1"]); + auto rdb2 = std::dynamic_pointer_cast(blocks["rdb2"]); + auto rdb3 = std::dynamic_pointer_cast(blocks["rdb3"]); + + auto out = rdb1->forward(ctx, x); + out = rdb2->forward(ctx, out); + out = rdb3->forward(ctx, out); + + out = ggml_add(ctx, ggml_scale(ctx, out, 0.2f), 
x); + return out; + } +}; + +class RRDBNet : public GGMLBlock { +protected: + int scale = 4; // default RealESRGAN_x4plus_anime_6B + int num_block = 6; // default RealESRGAN_x4plus_anime_6B + int num_in_ch = 3; + int num_out_ch = 3; + int num_feat = 64; // default RealESRGAN_x4plus_anime_6B + int num_grow_ch = 32; // default RealESRGAN_x4plus_anime_6B + +public: + RRDBNet() { + blocks["conv_first"] = std::shared_ptr(new Conv2d(num_in_ch, num_feat, {3, 3}, {1, 1}, {1, 1})); + for (int i = 0; i < num_block; i++) { + std::string name = "body." + std::to_string(i); + blocks[name] = std::shared_ptr(new RRDB(num_feat, num_grow_ch)); + } + blocks["conv_body"] = std::shared_ptr(new Conv2d(num_feat, num_feat, {3, 3}, {1, 1}, {1, 1})); + // upsample + blocks["conv_up1"] = std::shared_ptr(new Conv2d(num_feat, num_feat, {3, 3}, {1, 1}, {1, 1})); + blocks["conv_up2"] = std::shared_ptr(new Conv2d(num_feat, num_feat, {3, 3}, {1, 1}, {1, 1})); + blocks["conv_hr"] = std::shared_ptr(new Conv2d(num_feat, num_feat, {3, 3}, {1, 1}, {1, 1})); + blocks["conv_last"] = std::shared_ptr(new Conv2d(num_feat, num_out_ch, {3, 3}, {1, 1}, {1, 1})); + } + + struct ggml_tensor* lrelu(struct ggml_context* ctx, struct ggml_tensor* x) { + return ggml_leaky_relu(ctx, x, 0.2f, true); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [n, num_in_ch, h, w] + // return: [n, num_out_ch, h*4, w*4] + auto conv_first = std::dynamic_pointer_cast(blocks["conv_first"]); + auto conv_body = std::dynamic_pointer_cast(blocks["conv_body"]); + auto conv_up1 = std::dynamic_pointer_cast(blocks["conv_up1"]); + auto conv_up2 = std::dynamic_pointer_cast(blocks["conv_up2"]); + auto conv_hr = std::dynamic_pointer_cast(blocks["conv_hr"]); + auto conv_last = std::dynamic_pointer_cast(blocks["conv_last"]); + + auto feat = conv_first->forward(ctx, x); + auto body_feat = feat; + for (int i = 0; i < num_block; i++) { + std::string name = "body." 
+ std::to_string(i); + auto block = std::dynamic_pointer_cast(blocks[name]); + + body_feat = block->forward(ctx, body_feat); + } + body_feat = conv_body->forward(ctx, body_feat); + feat = ggml_add(ctx, feat, body_feat); + // upsample + feat = lrelu(ctx, conv_up1->forward(ctx, ggml_upscale(ctx, feat, 2, GGML_SCALE_MODE_NEAREST))); + feat = lrelu(ctx, conv_up2->forward(ctx, ggml_upscale(ctx, feat, 2, GGML_SCALE_MODE_NEAREST))); + auto out = conv_last->forward(ctx, lrelu(ctx, conv_hr->forward(ctx, feat))); + return out; + } +}; + +struct ESRGAN : public GGMLRunner { + RRDBNet rrdb_net; + int scale = 4; + int tile_size = 128; // avoid cuda OOM for 4gb VRAM + + ESRGAN(ggml_backend_t backend, std::map& tensor_types) + : GGMLRunner(backend) { + rrdb_net.init(params_ctx, tensor_types, ""); + } + + std::string get_desc() { + return "esrgan"; + } + + bool load_from_file(const std::string& file_path) { + LOG_INFO("loading esrgan from '%s'", file_path.c_str()); + + alloc_params_buffer(); + std::map esrgan_tensors; + rrdb_net.get_param_tensors(esrgan_tensors); + + ModelLoader model_loader; + if (!model_loader.init_from_file(file_path)) { + LOG_ERROR("init esrgan model loader from file failed: '%s'", file_path.c_str()); + return false; + } + + bool success = model_loader.load_tensors(esrgan_tensors, backend); + + if (!success) { + LOG_ERROR("load esrgan tensors from model loader failed"); + return false; + } + + LOG_INFO("esrgan model loaded"); + return success; + } + + struct ggml_cgraph* build_graph(struct ggml_tensor* x) { + struct ggml_cgraph* gf = ggml_new_graph(compute_ctx); + x = to_backend(x); + struct ggml_tensor* out = rrdb_net.forward(compute_ctx, x); + ggml_build_forward_expand(gf, out); + return gf; + } + + void compute(const int n_threads, + struct ggml_tensor* x, + ggml_tensor** output, + ggml_context* output_ctx = NULL) { + auto get_graph = [&]() -> struct ggml_cgraph* { + return build_graph(x); + }; + GGMLRunner::compute(get_graph, n_threads, false, output, 
output_ctx); + } +}; + +#endif // __ESRGAN_HPP__ \ No newline at end of file diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index a55396e20..81053f9e2 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -1,8 +1,3 @@ -# TODO: move into its own subdirectoy -# TODO: make stb libs a target (maybe common) -set(SD_TARGET sd) +include_directories(${CMAKE_CURRENT_SOURCE_DIR}) -add_executable(${SD_TARGET} main.cpp stb_image.h stb_image_write.h) -install(TARGETS ${SD_TARGET} RUNTIME) -target_link_libraries(${SD_TARGET} PRIVATE stable-diffusion ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${SD_TARGET} PUBLIC cxx_std_11) +add_subdirectory(cli) \ No newline at end of file diff --git a/examples/cli/CMakeLists.txt b/examples/cli/CMakeLists.txt new file mode 100644 index 000000000..4861bd3c8 --- /dev/null +++ b/examples/cli/CMakeLists.txt @@ -0,0 +1,6 @@ +set(TARGET sd) + +add_executable(${TARGET} main.cpp) +install(TARGETS ${TARGET} RUNTIME) +target_link_libraries(${TARGET} PRIVATE stable-diffusion ${CMAKE_THREAD_LIBS_INIT}) +target_compile_features(${TARGET} PUBLIC cxx_std_11) \ No newline at end of file diff --git a/examples/cli/main.cpp b/examples/cli/main.cpp new file mode 100644 index 000000000..bb695c3bb --- /dev/null +++ b/examples/cli/main.cpp @@ -0,0 +1,1238 @@ +#include +#include +#include +#include +#include +#include +#include + +// #include "preprocessing.hpp" +#include "flux.hpp" +#include "stable-diffusion.h" + +#define STB_IMAGE_IMPLEMENTATION +#define STB_IMAGE_STATIC +#include "stb_image.h" + +#define STB_IMAGE_WRITE_IMPLEMENTATION +#define STB_IMAGE_WRITE_STATIC +#include "stb_image_write.h" + +#define STB_IMAGE_RESIZE_IMPLEMENTATION +#define STB_IMAGE_RESIZE_STATIC +#include "stb_image_resize.h" + +const char* rng_type_to_str[] = { + "std_default", + "cuda", +}; + +// Names of the sampler method, same order as enum sample_method in stable-diffusion.h +const char* sample_method_str[] = { + "euler_a", + "euler", + "heun", 
+ "dpm2", + "dpm++2s_a", + "dpm++2m", + "dpm++2mv2", + "ipndm", + "ipndm_v", + "lcm", + "ddim_trailing", + "tcd", +}; + +// Names of the sigma schedule overrides, same order as sample_schedule in stable-diffusion.h +const char* schedule_str[] = { + "default", + "discrete", + "karras", + "exponential", + "ays", + "gits", +}; + +const char* modes_str[] = { + "txt2img", + "img2img", + "img2vid", + "edit", + "convert", +}; +#define SD_ALL_MODES_STR "txt2img, img2img, edit, convert" + +enum SDMode { + TXT2IMG, + IMG2IMG, + IMG2VID, + EDIT, + CONVERT, + MODE_COUNT +}; + +struct SDParams { + int n_threads = -1; + SDMode mode = TXT2IMG; + std::string model_path; + std::string clip_l_path; + std::string clip_g_path; + std::string t5xxl_path; + std::string diffusion_model_path; + std::string vae_path; + std::string taesd_path; + std::string esrgan_path; + std::string controlnet_path; + std::string embeddings_path; + std::string stacked_id_embeddings_path; + std::string input_id_images_path; + sd_type_t wtype = SD_TYPE_COUNT; + std::string tensor_type_rules; + std::string lora_model_dir; + std::string output_path = "output.png"; + std::string input_path; + std::string mask_path; + std::string control_image_path; + std::vector ref_image_paths; + + std::string prompt; + std::string negative_prompt; + float min_cfg = 1.0f; + float cfg_scale = 7.0f; + float guidance = 3.5f; + float eta = 0.f; + float style_ratio = 20.f; + int clip_skip = -1; // <= 0 represents unspecified + int width = 512; + int height = 512; + int batch_count = 1; + + int video_frames = 6; + int motion_bucket_id = 127; + int fps = 6; + float augmentation_level = 0.f; + + sample_method_t sample_method = EULER_A; + schedule_t schedule = DEFAULT; + int sample_steps = 20; + float strength = 0.75f; + float control_strength = 0.9f; + rng_type_t rng_type = CUDA_RNG; + int64_t seed = 42; + bool verbose = false; + bool vae_tiling = false; + bool control_net_cpu = false; + bool normalize_input = false; + bool clip_on_cpu 
= false; + bool vae_on_cpu = false; + bool diffusion_flash_attn = false; + bool canny_preprocess = false; + bool color = false; + int upscale_repeats = 1; + + std::vector skip_layers = {7, 8, 9}; + float slg_scale = 0.f; + float skip_layer_start = 0.01f; + float skip_layer_end = 0.2f; + + bool chroma_use_dit_mask = true; + bool chroma_use_t5_mask = false; + int chroma_t5_mask_pad = 1; +}; + +void print_params(SDParams params) { + printf("Option: \n"); + printf(" n_threads: %d\n", params.n_threads); + printf(" mode: %s\n", modes_str[params.mode]); + printf(" model_path: %s\n", params.model_path.c_str()); + printf(" wtype: %s\n", params.wtype < SD_TYPE_COUNT ? sd_type_name(params.wtype) : "unspecified"); + printf(" clip_l_path: %s\n", params.clip_l_path.c_str()); + printf(" clip_g_path: %s\n", params.clip_g_path.c_str()); + printf(" t5xxl_path: %s\n", params.t5xxl_path.c_str()); + printf(" diffusion_model_path: %s\n", params.diffusion_model_path.c_str()); + printf(" vae_path: %s\n", params.vae_path.c_str()); + printf(" taesd_path: %s\n", params.taesd_path.c_str()); + printf(" esrgan_path: %s\n", params.esrgan_path.c_str()); + printf(" controlnet_path: %s\n", params.controlnet_path.c_str()); + printf(" embeddings_path: %s\n", params.embeddings_path.c_str()); + printf(" stacked_id_embeddings_path: %s\n", params.stacked_id_embeddings_path.c_str()); + printf(" input_id_images_path: %s\n", params.input_id_images_path.c_str()); + printf(" style ratio: %.2f\n", params.style_ratio); + printf(" normalize input image : %s\n", params.normalize_input ? "true" : "false"); + printf(" output_path: %s\n", params.output_path.c_str()); + printf(" init_img: %s\n", params.input_path.c_str()); + printf(" mask_img: %s\n", params.mask_path.c_str()); + printf(" control_image: %s\n", params.control_image_path.c_str()); + printf(" ref_images_paths:\n"); + for (auto& path : params.ref_image_paths) { + printf(" %s\n", path.c_str()); + }; + printf(" clip on cpu: %s\n", params.clip_on_cpu ? 
"true" : "false"); + printf(" controlnet cpu: %s\n", params.control_net_cpu ? "true" : "false"); + printf(" vae decoder on cpu:%s\n", params.vae_on_cpu ? "true" : "false"); + printf(" diffusion flash attention:%s\n", params.diffusion_flash_attn ? "true" : "false"); + printf(" strength(control): %.2f\n", params.control_strength); + printf(" prompt: %s\n", params.prompt.c_str()); + printf(" negative_prompt: %s\n", params.negative_prompt.c_str()); + printf(" min_cfg: %.2f\n", params.min_cfg); + printf(" cfg_scale: %.2f\n", params.cfg_scale); + printf(" slg_scale: %.2f\n", params.slg_scale); + printf(" guidance: %.2f\n", params.guidance); + printf(" eta: %.2f\n", params.eta); + printf(" clip_skip: %d\n", params.clip_skip); + printf(" width: %d\n", params.width); + printf(" height: %d\n", params.height); + printf(" sample_method: %s\n", sample_method_str[params.sample_method]); + printf(" schedule: %s\n", schedule_str[params.schedule]); + printf(" sample_steps: %d\n", params.sample_steps); + printf(" strength(img2img): %.2f\n", params.strength); + printf(" rng: %s\n", rng_type_to_str[params.rng_type]); + printf(" seed: %ld\n", params.seed); + printf(" batch_count: %d\n", params.batch_count); + printf(" vae_tiling: %s\n", params.vae_tiling ? "true" : "false"); + printf(" upscale_repeats: %d\n", params.upscale_repeats); + printf(" chroma_use_dit_mask: %s\n", params.chroma_use_dit_mask ? "true" : "false"); + printf(" chroma_use_t5_mask: %s\n", params.chroma_use_t5_mask ? 
"true" : "false"); + printf(" chroma_t5_mask_pad: %d\n", params.chroma_t5_mask_pad); +} + +void print_usage(int argc, const char* argv[]) { + printf("usage: %s [arguments]\n", argv[0]); + printf("\n"); + printf("arguments:\n"); + printf(" -h, --help show this help message and exit\n"); + printf(" -M, --mode [MODE] run mode, one of:\n"); + printf(" txt2img: generate an image from a text prompt (default)\n"); + printf(" img2img: generate an image from a text prompt and an initial image (--init-img)\n"); + printf(" edit: modify an image (--ref-image) based on text instructions\n"); + printf(" convert: convert a model file to gguf format, optionally with quantization\n"); + printf(" -t, --threads N number of threads to use during computation (default: -1)\n"); + printf(" If threads <= 0, then threads will be set to the number of CPU physical cores\n"); + printf(" -m, --model [MODEL] path to full model\n"); + printf(" --diffusion-model path to the standalone diffusion model\n"); + printf(" --clip_l path to the clip-l text encoder\n"); + printf(" --clip_g path to the clip-g text encoder\n"); + printf(" --t5xxl path to the t5xxl text encoder\n"); + printf(" --vae [VAE] path to vae\n"); + printf(" --taesd [TAESD_PATH] path to taesd. Using Tiny AutoEncoder for fast decoding (low quality)\n"); + printf(" --control-net [CONTROL_PATH] path to control net model\n"); + printf(" --embd-dir [EMBEDDING_PATH] path to embeddings\n"); + printf(" --stacked-id-embd-dir [DIR] path to PHOTOMAKER stacked id embeddings\n"); + printf(" --input-id-images-dir [DIR] path to PHOTOMAKER input id images dir\n"); + printf(" --normalize-input normalize PHOTOMAKER input id images\n"); + printf(" --upscale-model [ESRGAN_PATH] path to esrgan model. 
Upscale images after generate, just RealESRGAN_x4plus_anime_6B supported by now\n"); + printf(" --upscale-repeats Run the ESRGAN upscaler this many times (default 1)\n"); + printf(" --type [TYPE] weight type (examples: f32, f16, q4_0, q4_1, q5_0, q5_1, q8_0, q2_K, q3_K, q4_K)\n"); + printf(" If not specified, the default is the type of the weight file\n"); + printf(" --tensor-type-rules [EXPRESSION] weight type per tensor pattern (example: \"^vae\\.=f16,model\\.=q8_0\")\n"); + printf(" --lora-model-dir [DIR] lora model directory\n"); + printf(" -i, --init-img [IMAGE] path to the input image, required by img2img\n"); + printf(" --mask [MASK] path to the mask image, required by img2img with mask\n"); + printf(" --control-image [IMAGE] path to image condition, control net\n"); + printf(" -r, --ref-image [PATH] reference image for Flux Kontext models (can be used multiple times) \n"); + printf(" -o, --output OUTPUT path to write result image to (default: ./output.png)\n"); + printf(" -p, --prompt [PROMPT] the prompt to render\n"); + printf(" -n, --negative-prompt PROMPT the negative prompt (default: \"\")\n"); + printf(" --cfg-scale SCALE unconditional guidance scale: (default: 7.0)\n"); + printf(" --guidance SCALE guidance scale for img2img (default: 3.5)\n"); + printf(" --slg-scale SCALE skip layer guidance (SLG) scale, only for DiT models: (default: 0)\n"); + printf(" 0 means disabled, a value of 2.5 is nice for sd3.5 medium\n"); + printf(" --eta SCALE eta in DDIM, only for DDIM and TCD: (default: 0)\n"); + printf(" --skip-layers LAYERS Layers to skip for SLG steps: (default: [7,8,9])\n"); + printf(" --skip-layer-start START SLG enabling point: (default: 0.01)\n"); + printf(" --skip-layer-end END SLG disabling point: (default: 0.2)\n"); + printf(" SLG will be enabled at step int([STEPS]*[START]) and disabled at int([STEPS]*[END])\n"); + printf(" --strength STRENGTH strength for noising/unnoising (default: 0.75)\n"); + printf(" --style-ratio STYLE-RATIO strength for 
keeping input identity (default: 20%%)\n"); + printf(" --control-strength STRENGTH strength to apply Control Net (default: 0.9)\n"); + printf(" 1.0 corresponds to full destruction of information in init image\n"); + printf(" -H, --height H image height, in pixel space (default: 512)\n"); + printf(" -W, --width W image width, in pixel space (default: 512)\n"); + printf(" --sampling-method {euler, euler_a, heun, dpm2, dpm++2s_a, dpm++2m, dpm++2mv2, ipndm, ipndm_v, lcm, ddim_trailing, tcd}\n"); + printf(" sampling method (default: \"euler_a\")\n"); + printf(" --steps STEPS number of sample steps (default: 20)\n"); + printf(" --rng {std_default, cuda} RNG (default: cuda)\n"); + printf(" -s SEED, --seed SEED RNG seed (default: 42, use random seed for < 0)\n"); + printf(" -b, --batch-count COUNT number of images to generate\n"); + printf(" --schedule {discrete, karras, exponential, ays, gits} Denoiser sigma schedule (default: discrete)\n"); + printf(" --clip-skip N ignore last layers of CLIP network; 1 ignores none, 2 ignores one layer (default: -1)\n"); + printf(" <= 0 represents unspecified, will be 1 for SD1.x, 2 for SD2.x\n"); + printf(" --vae-tiling process vae in tiles to reduce memory usage\n"); + printf(" --vae-on-cpu keep vae in cpu (for low vram)\n"); + printf(" --clip-on-cpu keep clip in cpu (for low vram)\n"); + printf(" --diffusion-fa use flash attention in the diffusion model (for low vram)\n"); + printf(" Might lower quality, since it implies converting k and v to f16.\n"); + printf(" This might crash if it is not supported by the backend.\n"); + printf(" --control-net-cpu keep controlnet in cpu (for low vram)\n"); + printf(" --canny apply canny preprocessor (edge detection)\n"); + printf(" --color colors the logging tags according to level\n"); + printf(" --chroma-disable-dit-mask disable dit mask for chroma\n"); + printf(" --chroma-enable-t5-mask enable t5 mask for chroma\n"); + printf(" --chroma-t5-mask-pad PAD_SIZE t5 mask pad size of chroma\n"); + 
printf(" -v, --verbose print extra info\n"); +} + +void parse_args(int argc, const char** argv, SDParams& params) { + bool invalid_arg = false; + std::string arg; + for (int i = 1; i < argc; i++) { + arg = argv[i]; + + if (arg == "-t" || arg == "--threads") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.n_threads = std::stoi(argv[i]); + } else if (arg == "-M" || arg == "--mode") { + if (++i >= argc) { + invalid_arg = true; + break; + } + const char* mode_selected = argv[i]; + int mode_found = -1; + for (int d = 0; d < MODE_COUNT; d++) { + if (!strcmp(mode_selected, modes_str[d])) { + mode_found = d; + } + } + if (mode_found == -1) { + fprintf(stderr, + "error: invalid mode %s, must be one of [%s]\n", + mode_selected, SD_ALL_MODES_STR); + exit(1); + } + params.mode = (SDMode)mode_found; + } else if (arg == "-m" || arg == "--model") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.model_path = argv[i]; + } else if (arg == "--clip_l") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.clip_l_path = argv[i]; + } else if (arg == "--clip_g") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.clip_g_path = argv[i]; + } else if (arg == "--t5xxl") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.t5xxl_path = argv[i]; + } else if (arg == "--diffusion-model") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.diffusion_model_path = argv[i]; + } else if (arg == "--vae") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.vae_path = argv[i]; + } else if (arg == "--taesd") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.taesd_path = argv[i]; + } else if (arg == "--control-net") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.controlnet_path = argv[i]; + } else if (arg == "--upscale-model") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.esrgan_path = argv[i]; + } else if (arg == "--embd-dir") { + if (++i 
>= argc) { + invalid_arg = true; + break; + } + params.embeddings_path = argv[i]; + } else if (arg == "--stacked-id-embd-dir") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.stacked_id_embeddings_path = argv[i]; + } else if (arg == "--input-id-images-dir") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.input_id_images_path = argv[i]; + } else if (arg == "--type") { + if (++i >= argc) { + invalid_arg = true; + break; + } + std::string type = argv[i]; + bool found = false; + std::string valid_types = ""; + for (size_t i = 0; i < SD_TYPE_COUNT; i++) { + auto trait = ggml_get_type_traits((ggml_type)i); + std::string name(trait->type_name); + if (name == "f32" || trait->to_float && trait->type_size) { + if (i) + valid_types += ", "; + valid_types += name; + if (type == name) { + if (ggml_quantize_requires_imatrix((ggml_type)i)) { + printf("\033[35;1m[WARNING]\033[0m: type %s requires imatrix to work properly. A dummy imatrix will be used, expect poor quality.\n", trait->type_name); + } + params.wtype = (enum sd_type_t)i; + found = true; + break; + } + } + } + if (!found) { + fprintf(stderr, "error: invalid weight format %s, must be one of [%s]\n", + type.c_str(), + valid_types.c_str()); + exit(1); + } + } else if (arg == "--tensor-type-rules") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.tensor_type_rules = argv[i]; + } else if (arg == "--lora-model-dir") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.lora_model_dir = argv[i]; + } else if (arg == "-i" || arg == "--init-img") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.input_path = argv[i]; + } else if (arg == "--mask") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.mask_path = argv[i]; + } else if (arg == "--control-image") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.control_image_path = argv[i]; + } else if (arg == "-o" || arg == "--output") { + if (++i >= argc) { + 
invalid_arg = true; + break; + } + params.output_path = argv[i]; + } else if (arg == "-p" || arg == "--prompt") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.prompt = argv[i]; + } else if (arg == "--upscale-repeats") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.upscale_repeats = std::stoi(argv[i]); + if (params.upscale_repeats < 1) { + fprintf(stderr, "error: upscale multiplier must be at least 1\n"); + exit(1); + } + } else if (arg == "-n" || arg == "--negative-prompt") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.negative_prompt = argv[i]; + } else if (arg == "--cfg-scale") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.cfg_scale = std::stof(argv[i]); + } else if (arg == "--guidance") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.guidance = std::stof(argv[i]); + } else if (arg == "--eta") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.eta = std::stof(argv[i]); + } else if (arg == "--strength") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.strength = std::stof(argv[i]); + } else if (arg == "--style-ratio") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.style_ratio = std::stof(argv[i]); + } else if (arg == "--control-strength") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.control_strength = std::stof(argv[i]); + } else if (arg == "-H" || arg == "--height") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.height = std::stoi(argv[i]); + } else if (arg == "-W" || arg == "--width") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.width = std::stoi(argv[i]); + } else if (arg == "--steps") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.sample_steps = std::stoi(argv[i]); + } else if (arg == "--clip-skip") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.clip_skip = std::stoi(argv[i]); + } else if (arg == 
"--vae-tiling") { + params.vae_tiling = true; + } else if (arg == "--control-net-cpu") { + params.control_net_cpu = true; + } else if (arg == "--normalize-input") { + params.normalize_input = true; + } else if (arg == "--clip-on-cpu") { + params.clip_on_cpu = true; // will slow down get_learned_condiotion but necessary for low MEM GPUs + } else if (arg == "--vae-on-cpu") { + params.vae_on_cpu = true; // will slow down latent decoding but necessary for low MEM GPUs + } else if (arg == "--diffusion-fa") { + params.diffusion_flash_attn = true; // can reduce MEM significantly + } else if (arg == "--canny") { + params.canny_preprocess = true; + } else if (arg == "-b" || arg == "--batch-count") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.batch_count = std::stoi(argv[i]); + } else if (arg == "--rng") { + if (++i >= argc) { + invalid_arg = true; + break; + } + std::string rng_type_str = argv[i]; + if (rng_type_str == "std_default") { + params.rng_type = STD_DEFAULT_RNG; + } else if (rng_type_str == "cuda") { + params.rng_type = CUDA_RNG; + } else { + invalid_arg = true; + break; + } + } else if (arg == "--schedule") { + if (++i >= argc) { + invalid_arg = true; + break; + } + const char* schedule_selected = argv[i]; + int schedule_found = -1; + for (int d = 0; d < N_SCHEDULES; d++) { + if (!strcmp(schedule_selected, schedule_str[d])) { + schedule_found = d; + } + } + if (schedule_found == -1) { + invalid_arg = true; + break; + } + params.schedule = (schedule_t)schedule_found; + } else if (arg == "-s" || arg == "--seed") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.seed = std::stoll(argv[i]); + } else if (arg == "--sampling-method") { + if (++i >= argc) { + invalid_arg = true; + break; + } + const char* sample_method_selected = argv[i]; + int sample_method_found = -1; + for (int m = 0; m < N_SAMPLE_METHODS; m++) { + if (!strcmp(sample_method_selected, sample_method_str[m])) { + sample_method_found = m; + } + } + if 
(sample_method_found == -1) { + invalid_arg = true; + break; + } + params.sample_method = (sample_method_t)sample_method_found; + } else if (arg == "-h" || arg == "--help") { + print_usage(argc, argv); + exit(0); + } else if (arg == "-v" || arg == "--verbose") { + params.verbose = true; + } else if (arg == "--color") { + params.color = true; + } else if (arg == "--slg-scale") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.slg_scale = std::stof(argv[i]); + } else if (arg == "--skip-layers") { + if (++i >= argc) { + invalid_arg = true; + break; + } + if (argv[i][0] != '[') { + invalid_arg = true; + break; + } + std::string layers_str = argv[i]; + while (layers_str.back() != ']') { + if (++i >= argc) { + invalid_arg = true; + break; + } + layers_str += " " + std::string(argv[i]); + } + layers_str = layers_str.substr(1, layers_str.size() - 2); + + std::regex regex("[, ]+"); + std::sregex_token_iterator iter(layers_str.begin(), layers_str.end(), regex, -1); + std::sregex_token_iterator end; + std::vector tokens(iter, end); + std::vector layers; + for (const auto& token : tokens) { + try { + layers.push_back(std::stoi(token)); + } catch (const std::invalid_argument& e) { + invalid_arg = true; + break; + } + } + params.skip_layers = layers; + + if (invalid_arg) { + break; + } + } else if (arg == "--skip-layer-start") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.skip_layer_start = std::stof(argv[i]); + } else if (arg == "--skip-layer-end") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.skip_layer_end = std::stof(argv[i]); + } else if (arg == "-r" || arg == "--ref-image") { + if (++i >= argc) { + invalid_arg = true; + break; + } + params.ref_image_paths.push_back(argv[i]); + } else if (arg == "--chroma-disable-dit-mask") { + params.chroma_use_dit_mask = false; + } else if (arg == "--chroma-enable-t5-mask") { + params.chroma_use_t5_mask = true; + } else if (arg == "--chroma-t5-mask-pad") { + if (++i >= argc) { + 
invalid_arg = true; + break; + } + params.chroma_t5_mask_pad = std::stoi(argv[i]); + } else { + fprintf(stderr, "error: unknown argument: %s\n", arg.c_str()); + print_usage(argc, argv); + exit(1); + } + } + if (invalid_arg) { + fprintf(stderr, "error: invalid parameter for argument: %s\n", arg.c_str()); + print_usage(argc, argv); + exit(1); + } + if (params.n_threads <= 0) { + params.n_threads = get_num_physical_cores(); + } + + if (params.mode != CONVERT && params.mode != IMG2VID && params.prompt.length() == 0) { + fprintf(stderr, "error: the following arguments are required: prompt\n"); + print_usage(argc, argv); + exit(1); + } + + if (params.model_path.length() == 0 && params.diffusion_model_path.length() == 0) { + fprintf(stderr, "error: the following arguments are required: model_path/diffusion_model\n"); + print_usage(argc, argv); + exit(1); + } + + if ((params.mode == IMG2IMG || params.mode == IMG2VID) && params.input_path.length() == 0) { + fprintf(stderr, "error: when using the img2img/img2vid mode, the following arguments are required: init-img\n"); + print_usage(argc, argv); + exit(1); + } + + if (params.mode == EDIT && params.ref_image_paths.size() == 0) { + fprintf(stderr, "error: when using the edit mode, the following arguments are required: ref-image\n"); + print_usage(argc, argv); + exit(1); + } + + if (params.output_path.length() == 0) { + fprintf(stderr, "error: the following arguments are required: output_path\n"); + print_usage(argc, argv); + exit(1); + } + + if (params.width <= 0 || params.width % 64 != 0) { + fprintf(stderr, "error: the width must be a multiple of 64\n"); + exit(1); + } + + if (params.height <= 0 || params.height % 64 != 0) { + fprintf(stderr, "error: the height must be a multiple of 64\n"); + exit(1); + } + + if (params.sample_steps <= 0) { + fprintf(stderr, "error: the sample_steps must be greater than 0\n"); + exit(1); + } + + if (params.strength < 0.f || params.strength > 1.f) { + fprintf(stderr, "error: can only work 
with strength in [0.0, 1.0]\n"); + exit(1); + } + + if (params.mode != CONVERT && params.tensor_type_rules.size() > 0) { + fprintf(stderr, "warning: --tensor-type-rules is currently supported only for conversion\n"); + } + + if (params.seed < 0) { + srand((int)time(NULL)); + params.seed = rand(); + } + + if (params.mode == CONVERT) { + if (params.output_path == "output.png") { + params.output_path = "output.gguf"; + } + } +} + +static std::string sd_basename(const std::string& path) { + size_t pos = path.find_last_of('/'); + if (pos != std::string::npos) { + return path.substr(pos + 1); + } + pos = path.find_last_of('\\'); + if (pos != std::string::npos) { + return path.substr(pos + 1); + } + return path; +} + +std::string get_image_params(SDParams params, int64_t seed) { + std::string parameter_string = params.prompt + "\n"; + if (params.negative_prompt.size() != 0) { + parameter_string += "Negative prompt: " + params.negative_prompt + "\n"; + } + parameter_string += "Steps: " + std::to_string(params.sample_steps) + ", "; + parameter_string += "CFG scale: " + std::to_string(params.cfg_scale) + ", "; + if (params.slg_scale != 0 && params.skip_layers.size() != 0) { + parameter_string += "SLG scale: " + std::to_string(params.cfg_scale) + ", "; + parameter_string += "Skip layers: ["; + for (const auto& layer : params.skip_layers) { + parameter_string += std::to_string(layer) + ", "; + } + parameter_string += "], "; + parameter_string += "Skip layer start: " + std::to_string(params.skip_layer_start) + ", "; + parameter_string += "Skip layer end: " + std::to_string(params.skip_layer_end) + ", "; + } + parameter_string += "Guidance: " + std::to_string(params.guidance) + ", "; + parameter_string += "Eta: " + std::to_string(params.eta) + ", "; + parameter_string += "Seed: " + std::to_string(seed) + ", "; + parameter_string += "Size: " + std::to_string(params.width) + "x" + std::to_string(params.height) + ", "; + parameter_string += "Model: " + 
sd_basename(params.model_path) + ", "; + parameter_string += "RNG: " + std::string(rng_type_to_str[params.rng_type]) + ", "; + parameter_string += "Sampler: " + std::string(sample_method_str[params.sample_method]); + if (params.schedule == KARRAS) { + parameter_string += " karras"; + } + parameter_string += ", "; + parameter_string += "Version: stable-diffusion.cpp"; + return parameter_string; +} + +/* Enables Printing the log level tag in color using ANSI escape codes */ +void sd_log_cb(enum sd_log_level_t level, const char* log, void* data) { + SDParams* params = (SDParams*)data; + int tag_color; + const char* level_str; + FILE* out_stream = (level == SD_LOG_ERROR) ? stderr : stdout; + + if (!log || (!params->verbose && level <= SD_LOG_DEBUG)) { + return; + } + + switch (level) { + case SD_LOG_DEBUG: + tag_color = 37; + level_str = "DEBUG"; + break; + case SD_LOG_INFO: + tag_color = 34; + level_str = "INFO"; + break; + case SD_LOG_WARN: + tag_color = 35; + level_str = "WARN"; + break; + case SD_LOG_ERROR: + tag_color = 31; + level_str = "ERROR"; + break; + default: /* Potential future-proofing */ + tag_color = 33; + level_str = "?????"; + break; + } + + if (params->color == true) { + fprintf(out_stream, "\033[%d;1m[%-5s]\033[0m ", tag_color, level_str); + } else { + fprintf(out_stream, "[%-5s] ", level_str); + } + fputs(log, out_stream); + fflush(out_stream); +} + +int main(int argc, const char* argv[]) { + SDParams params; + + parse_args(argc, argv, params); + + sd_set_log_callback(sd_log_cb, (void*)¶ms); + + if (params.verbose) { + print_params(params); + printf("%s", sd_get_system_info()); + } + + if (params.mode == CONVERT) { + bool success = convert(params.model_path.c_str(), params.vae_path.c_str(), params.output_path.c_str(), params.wtype, params.tensor_type_rules.c_str()); + if (!success) { + fprintf(stderr, + "convert '%s'/'%s' to '%s' failed\n", + params.model_path.c_str(), + params.vae_path.c_str(), + params.output_path.c_str()); + return 1; + } else { 
+ printf("convert '%s'/'%s' to '%s' success\n", + params.model_path.c_str(), + params.vae_path.c_str(), + params.output_path.c_str()); + return 0; + } + } + + if (params.mode == IMG2VID) { + fprintf(stderr, "SVD support is broken, do not use it!!!\n"); + return 1; + } + + bool vae_decode_only = true; + uint8_t* input_image_buffer = NULL; + uint8_t* control_image_buffer = NULL; + uint8_t* mask_image_buffer = NULL; + std::vector ref_images; + + if (params.mode == IMG2IMG || params.mode == IMG2VID) { + vae_decode_only = false; + + int c = 0; + int width = 0; + int height = 0; + input_image_buffer = stbi_load(params.input_path.c_str(), &width, &height, &c, 3); + if (input_image_buffer == NULL) { + fprintf(stderr, "load image from '%s' failed\n", params.input_path.c_str()); + return 1; + } + if (c < 3) { + fprintf(stderr, "the number of channels for the input image must be >= 3, but got %d channels\n", c); + free(input_image_buffer); + return 1; + } + if (width <= 0) { + fprintf(stderr, "error: the width of image must be greater than 0\n"); + free(input_image_buffer); + return 1; + } + if (height <= 0) { + fprintf(stderr, "error: the height of image must be greater than 0\n"); + free(input_image_buffer); + return 1; + } + + // Resize input image ... 
+ if (params.height != height || params.width != width) { + printf("resize input image from %dx%d to %dx%d\n", width, height, params.width, params.height); + int resized_height = params.height; + int resized_width = params.width; + + uint8_t* resized_image_buffer = (uint8_t*)malloc(resized_height * resized_width * 3); + if (resized_image_buffer == NULL) { + fprintf(stderr, "error: allocate memory for resize input image\n"); + free(input_image_buffer); + return 1; + } + stbir_resize(input_image_buffer, width, height, 0, + resized_image_buffer, resized_width, resized_height, 0, STBIR_TYPE_UINT8, + 3 /*RGB channel*/, STBIR_ALPHA_CHANNEL_NONE, 0, + STBIR_EDGE_CLAMP, STBIR_EDGE_CLAMP, + STBIR_FILTER_BOX, STBIR_FILTER_BOX, + STBIR_COLORSPACE_SRGB, nullptr); + + // Save resized result + free(input_image_buffer); + input_image_buffer = resized_image_buffer; + } + } else if (params.mode == EDIT) { + vae_decode_only = false; + for (auto& path : params.ref_image_paths) { + int c = 0; + int width = 0; + int height = 0; + uint8_t* image_buffer = stbi_load(path.c_str(), &width, &height, &c, 3); + if (image_buffer == NULL) { + fprintf(stderr, "load image from '%s' failed\n", path.c_str()); + return 1; + } + if (c < 3) { + fprintf(stderr, "the number of channels for the input image must be >= 3, but got %d channels\n", c); + free(image_buffer); + return 1; + } + if (width <= 0) { + fprintf(stderr, "error: the width of image must be greater than 0\n"); + free(image_buffer); + return 1; + } + if (height <= 0) { + fprintf(stderr, "error: the height of image must be greater than 0\n"); + free(image_buffer); + return 1; + } + ref_images.push_back({(uint32_t)width, + (uint32_t)height, + 3, + image_buffer}); + } + } + + sd_ctx_t* sd_ctx = new_sd_ctx(params.model_path.c_str(), + params.clip_l_path.c_str(), + params.clip_g_path.c_str(), + params.t5xxl_path.c_str(), + params.diffusion_model_path.c_str(), + params.vae_path.c_str(), + params.taesd_path.c_str(), + 
params.controlnet_path.c_str(), + params.lora_model_dir.c_str(), + params.embeddings_path.c_str(), + params.stacked_id_embeddings_path.c_str(), + vae_decode_only, + params.vae_tiling, + true, + params.n_threads, + params.wtype, + params.rng_type, + params.schedule, + params.clip_on_cpu, + params.control_net_cpu, + params.vae_on_cpu, + params.diffusion_flash_attn, + params.chroma_use_dit_mask, + params.chroma_use_t5_mask, + params.chroma_t5_mask_pad); + + if (sd_ctx == NULL) { + printf("new_sd_ctx_t failed\n"); + return 1; + } + + sd_image_t* control_image = NULL; + if (params.controlnet_path.size() > 0 && params.control_image_path.size() > 0) { + int c = 0; + control_image_buffer = stbi_load(params.control_image_path.c_str(), ¶ms.width, ¶ms.height, &c, 3); + if (control_image_buffer == NULL) { + fprintf(stderr, "load image from '%s' failed\n", params.control_image_path.c_str()); + return 1; + } + control_image = new sd_image_t{(uint32_t)params.width, + (uint32_t)params.height, + 3, + control_image_buffer}; + if (params.canny_preprocess) { // apply preprocessor + control_image->data = preprocess_canny(control_image->data, + control_image->width, + control_image->height, + 0.08f, + 0.08f, + 0.8f, + 1.0f, + false); + } + } + + std::vector default_mask_image_vec(params.width * params.height, 255); + if (params.mask_path != "") { + int c = 0; + mask_image_buffer = stbi_load(params.mask_path.c_str(), ¶ms.width, ¶ms.height, &c, 1); + } else { + mask_image_buffer = default_mask_image_vec.data(); + } + sd_image_t mask_image = {(uint32_t)params.width, + (uint32_t)params.height, + 1, + mask_image_buffer}; + + sd_image_t* results; + if (params.mode == TXT2IMG) { + results = txt2img(sd_ctx, + params.prompt.c_str(), + params.negative_prompt.c_str(), + params.clip_skip, + params.cfg_scale, + params.guidance, + params.eta, + params.width, + params.height, + params.sample_method, + params.sample_steps, + params.seed, + params.batch_count, + control_image, + params.control_strength, 
+ params.style_ratio, + params.normalize_input, + params.input_id_images_path.c_str(), + params.skip_layers.data(), + params.skip_layers.size(), + params.slg_scale, + params.skip_layer_start, + params.skip_layer_end); + } else if (params.mode == IMG2IMG || params.mode == IMG2VID) { + sd_image_t input_image = {(uint32_t)params.width, + (uint32_t)params.height, + 3, + input_image_buffer}; + + if (params.mode == IMG2VID) { + results = img2vid(sd_ctx, + input_image, + params.width, + params.height, + params.video_frames, + params.motion_bucket_id, + params.fps, + params.augmentation_level, + params.min_cfg, + params.cfg_scale, + params.sample_method, + params.sample_steps, + params.strength, + params.seed); + if (results == NULL) { + printf("generate failed\n"); + free_sd_ctx(sd_ctx); + return 1; + } + size_t last = params.output_path.find_last_of("."); + std::string dummy_name = last != std::string::npos ? params.output_path.substr(0, last) : params.output_path; + for (int i = 0; i < params.video_frames; i++) { + if (results[i].data == NULL) { + continue; + } + std::string final_image_path = i > 0 ? 
dummy_name + "_" + std::to_string(i + 1) + ".png" : dummy_name + ".png"; + stbi_write_png(final_image_path.c_str(), results[i].width, results[i].height, results[i].channel, + results[i].data, 0, get_image_params(params, params.seed + i).c_str()); + printf("save result image to '%s'\n", final_image_path.c_str()); + free(results[i].data); + results[i].data = NULL; + } + free(results); + free_sd_ctx(sd_ctx); + return 0; + } else { + results = img2img(sd_ctx, + input_image, + mask_image, + params.prompt.c_str(), + params.negative_prompt.c_str(), + params.clip_skip, + params.cfg_scale, + params.guidance, + params.eta, + params.width, + params.height, + params.sample_method, + params.sample_steps, + params.strength, + params.seed, + params.batch_count, + control_image, + params.control_strength, + params.style_ratio, + params.normalize_input, + params.input_id_images_path.c_str(), + params.skip_layers.data(), + params.skip_layers.size(), + params.slg_scale, + params.skip_layer_start, + params.skip_layer_end); + } + } else { // EDIT + results = edit(sd_ctx, + ref_images.data(), + ref_images.size(), + params.prompt.c_str(), + params.negative_prompt.c_str(), + params.clip_skip, + params.cfg_scale, + params.guidance, + params.eta, + params.width, + params.height, + params.sample_method, + params.sample_steps, + params.strength, + params.seed, + params.batch_count, + control_image, + params.control_strength, + params.style_ratio, + params.normalize_input, + params.skip_layers.data(), + params.skip_layers.size(), + params.slg_scale, + params.skip_layer_start, + params.skip_layer_end); + } + + if (results == NULL) { + printf("generate failed\n"); + free_sd_ctx(sd_ctx); + return 1; + } + + int upscale_factor = 4; // unused for RealESRGAN_x4plus_anime_6B.pth + if (params.esrgan_path.size() > 0 && params.upscale_repeats > 0) { + upscaler_ctx_t* upscaler_ctx = new_upscaler_ctx(params.esrgan_path.c_str(), + params.n_threads); + + if (upscaler_ctx == NULL) { + 
printf("new_upscaler_ctx failed\n"); + } else { + for (int i = 0; i < params.batch_count; i++) { + if (results[i].data == NULL) { + continue; + } + sd_image_t current_image = results[i]; + for (int u = 0; u < params.upscale_repeats; ++u) { + sd_image_t upscaled_image = upscale(upscaler_ctx, current_image, upscale_factor); + if (upscaled_image.data == NULL) { + printf("upscale failed\n"); + break; + } + free(current_image.data); + current_image = upscaled_image; + } + results[i] = current_image; // Set the final upscaled image as the result + } + } + } + + std::string dummy_name, ext, lc_ext; + bool is_jpg; + size_t last = params.output_path.find_last_of("."); + size_t last_path = std::min(params.output_path.find_last_of("/"), + params.output_path.find_last_of("\\")); + if (last != std::string::npos // filename has extension + && (last_path == std::string::npos || last > last_path)) { + dummy_name = params.output_path.substr(0, last); + ext = lc_ext = params.output_path.substr(last); + std::transform(ext.begin(), ext.end(), lc_ext.begin(), ::tolower); + is_jpg = lc_ext == ".jpg" || lc_ext == ".jpeg" || lc_ext == ".jpe"; + } else { + dummy_name = params.output_path; + ext = lc_ext = ""; + is_jpg = false; + } + // appending ".png" to absent or unknown extension + if (!is_jpg && lc_ext != ".png") { + dummy_name += ext; + ext = ".png"; + } + for (int i = 0; i < params.batch_count; i++) { + if (results[i].data == NULL) { + continue; + } + std::string final_image_path = i > 0 ? 
dummy_name + "_" + std::to_string(i + 1) + ext : dummy_name + ext; + if (is_jpg) { + stbi_write_jpg(final_image_path.c_str(), results[i].width, results[i].height, results[i].channel, + results[i].data, 90, get_image_params(params, params.seed + i).c_str()); + printf("save result JPEG image to '%s'\n", final_image_path.c_str()); + } else { + stbi_write_png(final_image_path.c_str(), results[i].width, results[i].height, results[i].channel, + results[i].data, 0, get_image_params(params, params.seed + i).c_str()); + printf("save result PNG image to '%s'\n", final_image_path.c_str()); + } + free(results[i].data); + results[i].data = NULL; + } + free(results); + free_sd_ctx(sd_ctx); + free(control_image_buffer); + free(input_image_buffer); + + return 0; +} diff --git a/examples/main.cpp b/examples/main.cpp deleted file mode 100644 index 0412d4316..000000000 --- a/examples/main.cpp +++ /dev/null @@ -1,363 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include - -#include "stable-diffusion.h" - -#define STB_IMAGE_IMPLEMENTATION -#include "stb_image.h" - -#define STB_IMAGE_WRITE_IMPLEMENTATION -#define STB_IMAGE_WRITE_STATIC -#include "stb_image_write.h" - -#if defined(__APPLE__) && defined(__MACH__) -#include -#include -#endif - -#if !defined(_WIN32) -#include -#include -#endif - -#define TXT2IMG "txt2img" -#define IMG2IMG "img2img" - -// get_num_physical_cores is copy from -// https://github.com/ggerganov/llama.cpp/blob/master/examples/common.cpp -// LICENSE: https://github.com/ggerganov/llama.cpp/blob/master/LICENSE -int32_t get_num_physical_cores() { -#ifdef __linux__ - // enumerate the set of thread siblings, num entries is num cores - std::unordered_set siblings; - for (uint32_t cpu=0; cpu < UINT32_MAX; ++cpu) { - std::ifstream thread_siblings("/sys/devices/system/cpu" - + std::to_string(cpu) + "/topology/thread_siblings"); - if (!thread_siblings.is_open()) { - break; // no more cpus - } - std::string line; - if (std::getline(thread_siblings, 
line)) { - siblings.insert(line); - } - } - if (siblings.size() > 0) { - return static_cast(siblings.size()); - } -#elif defined(__APPLE__) && defined(__MACH__) - int32_t num_physical_cores; - size_t len = sizeof(num_physical_cores); - int result = sysctlbyname("hw.perflevel0.physicalcpu", &num_physical_cores, &len, NULL, 0); - if (result == 0) { - return num_physical_cores; - } - result = sysctlbyname("hw.physicalcpu", &num_physical_cores, &len, NULL, 0); - if (result == 0) { - return num_physical_cores; - } -#elif defined(_WIN32) - //TODO: Implement -#endif - unsigned int n_threads = std::thread::hardware_concurrency(); - return n_threads > 0 ? (n_threads <= 4 ? n_threads : n_threads / 2) : 4; -} - -struct Option { - int n_threads = -1; - std::string mode = TXT2IMG; - std::string model_path; - std::string output_path = "output.png"; - std::string init_img; - std::string prompt; - std::string negative_prompt; - float cfg_scale = 7.0f; - int w = 512; - int h = 512; - SampleMethod sample_method = EULAR_A; - int sample_steps = 20; - float strength = 0.75f; - int seed = 42; - bool verbose = false; - - void print() { - printf("Option: \n"); - printf(" n_threads: %d\n", n_threads); - printf(" mode: %s\n", mode.c_str()); - printf(" model_path: %s\n", model_path.c_str()); - printf(" output_path: %s\n", output_path.c_str()); - printf(" init_img: %s\n", init_img.c_str()); - printf(" prompt: %s\n", prompt.c_str()); - printf(" negative_prompt: %s\n", negative_prompt.c_str()); - printf(" cfg_scale: %.2f\n", cfg_scale); - printf(" width: %d\n", w); - printf(" height: %d\n", h); - printf(" sample_method: %s\n", "eular a"); - printf(" sample_steps: %d\n", sample_steps); - printf(" strength: %.2f\n", strength); - printf(" seed: %d\n", seed); - } -}; - -void print_usage(int argc, const char* argv[]) { - printf("usage: %s [arguments]\n", argv[0]); - printf("\n"); - printf("arguments:\n"); - printf(" -h, --help show this help message and exit\n"); - printf(" -M, --mode [txt2img or 
img2img] generation mode (default: txt2img)\n"); - printf(" -t, --threads N number of threads to use during computation (default: -1).\n"); - printf(" If threads <= 0, then threads will be set to the number of CPU physical cores\n"); - printf(" -m, --model [MODEL] path to model\n"); - printf(" -i, --init-img [IMAGE] path to the input image, required by img2img\n"); - printf(" -o, --output OUTPUT path to write result image to (default: .\\output.png)\n"); - printf(" -p, --prompt [PROMPT] the prompt to render\n"); - printf(" -n, --negative-prompt PROMPT the negative prompt (default: \"\")\n"); - printf(" --cfg-scale SCALE unconditional guidance scale: (default: 7.0)\n"); - printf(" --strength STRENGTH strength for noising/unnoising (default: 0.75)\n"); - printf(" 1.0 corresponds to full destruction of information in init image\n"); - printf(" -H, --height H image height, in pixel space (default: 512)\n"); - printf(" -W, --width W image width, in pixel space (default: 512)\n"); - printf(" --sample-method SAMPLE_METHOD sample method (default: \"eular a\")\n"); - printf(" --steps STEPS number of sample steps (default: 20)\n"); - printf(" -s SEED, --seed SEED RNG seed (default: 42, use random seed for < 0)\n"); - printf(" -v, --verbose print extra info\n"); -} - -void parse_args(int argc, const char* argv[], Option* opt) { - bool invalid_arg = false; - - for (int i = 1; i < argc; i++) { - std::string arg = argv[i]; - - if (arg == "-t" || arg == "--threads") { - if (++i >= argc) { - invalid_arg = true; - break; - } - opt->n_threads = std::stoi(argv[i]); - } else if (arg == "-M" || arg == "--mode") { - if (++i >= argc) { - invalid_arg = true; - break; - } - opt->mode = argv[i]; - - } else if (arg == "-m" || arg == "--model") { - if (++i >= argc) { - invalid_arg = true; - break; - } - opt->model_path = argv[i]; - } else if (arg == "-i" || arg == "--init-img") { - if (++i >= argc) { - invalid_arg = true; - break; - } - opt->init_img = argv[i]; - } else if (arg == "-o" || arg 
== "--output") { - if (++i >= argc) { - invalid_arg = true; - break; - } - opt->output_path = argv[i]; - } else if (arg == "-p" || arg == "--prompt") { - if (++i >= argc) { - invalid_arg = true; - break; - } - opt->prompt = argv[i]; - } else if (arg == "-n" || arg == "--negative-prompt") { - if (++i >= argc) { - invalid_arg = true; - break; - } - opt->negative_prompt = argv[i]; - } else if (arg == "--cfg-scale") { - if (++i >= argc) { - invalid_arg = true; - break; - } - opt->cfg_scale = std::stof(argv[i]); - } else if (arg == "--strength") { - if (++i >= argc) { - invalid_arg = true; - break; - } - opt->strength = std::stof(argv[i]); - } else if (arg == "-H" || arg == "--height") { - if (++i >= argc) { - invalid_arg = true; - break; - } - opt->h = std::stoi(argv[i]); - } else if (arg == "-W" || arg == "--width") { - if (++i >= argc) { - invalid_arg = true; - break; - } - opt->w = std::stoi(argv[i]); - } else if (arg == "--steps") { - if (++i >= argc) { - invalid_arg = true; - break; - } - opt->sample_steps = std::stoi(argv[i]); - } else if (arg == "-s" || arg == "--seed") { - if (++i >= argc) { - invalid_arg = true; - break; - } - opt->seed = std::stoi(argv[i]); - } else if (arg == "-h" || arg == "--help") { - print_usage(argc, argv); - exit(0); - } else if (arg == "-v" || arg == "--verbose") { - opt->verbose = true; - } else { - fprintf(stderr, "error: unknown argument: %s\n", arg.c_str()); - print_usage(argc, argv); - exit(1); - } - if (invalid_arg) { - fprintf(stderr, "error: invalid parameter for argument: %s\n", arg.c_str()); - print_usage(argc, argv); - exit(1); - } - } - - if (opt->n_threads <= 0) { - opt->n_threads = get_num_physical_cores(); - } - - if (opt->mode != TXT2IMG && opt->mode != IMG2IMG) { - fprintf(stderr, "error: invalid mode %s, must be one of ['%s', '%s']\n", - opt->mode.c_str(), TXT2IMG, IMG2IMG); - exit(1); - } - - if (opt->prompt.length() == 0) { - fprintf(stderr, "error: the following arguments are required: prompt\n"); - 
print_usage(argc, argv); - exit(1); - } - - if (opt->model_path.length() == 0) { - fprintf(stderr, "error: the following arguments are required: model_path\n"); - print_usage(argc, argv); - exit(1); - } - - if (opt->mode == IMG2IMG && opt->init_img.length() == 0) { - fprintf(stderr, "error: when using the img2img mode, the following arguments are required: init-img\n"); - print_usage(argc, argv); - exit(1); - } - - if (opt->output_path.length() == 0) { - fprintf(stderr, "error: the following arguments are required: output_path\n"); - print_usage(argc, argv); - exit(1); - } - - if (opt->w <= 0 || opt->w % 32 != 0) { - fprintf(stderr, "error: the width must be a multiple of 32\n"); - exit(1); - } - - if (opt->h <= 0 || opt->h % 32 != 0) { - fprintf(stderr, "error: the height must be a multiple of 32\n"); - exit(1); - } - - if (opt->sample_steps <= 0) { - fprintf(stderr, "error: the sample_steps must be greater than 0\n"); - exit(1); - } - - if (opt->strength < 0.f || opt->strength > 1.f) { - fprintf(stderr, "error: can only work with strength in [0.0, 1.0]\n"); - exit(1); - } -} - -int main(int argc, const char* argv[]) { - Option opt; - parse_args(argc, argv, &opt); - - if (opt.verbose) { - opt.print(); - printf("%s", sd_get_system_info().c_str()); - set_sd_log_level(SDLogLevel::DEBUG); - } - - bool vae_decode_only = true; - std::vector init_img; - if (opt.mode == IMG2IMG) { - vae_decode_only = false; - - int c = 0; - unsigned char* img_data = stbi_load(opt.init_img.c_str(), &opt.w, &opt.h, &c, 3); - if (img_data == NULL) { - fprintf(stderr, "load image from '%s' failed\n", opt.init_img.c_str()); - return 1; - } - if (c != 3) { - fprintf(stderr, "input image must be a 3 channels RGB image, but got %d channels\n", c); - free(img_data); - return 1; - } - if (opt.w <= 0 || opt.w % 32 != 0) { - fprintf(stderr, "error: the width of image must be a multiple of 32\n"); - free(img_data); - return 1; - } - if (opt.h <= 0 || opt.h % 32 != 0) { - fprintf(stderr, "error: the 
height of image must be a multiple of 32\n"); - free(img_data); - return 1; - } - init_img.assign(img_data, img_data + (opt.w * opt.h * c)); - } - - StableDiffusion sd(opt.n_threads, vae_decode_only, true); - if (!sd.load_from_file(opt.model_path)) { - return 1; - } - - std::vector img; - if (opt.mode == TXT2IMG) { - img = sd.txt2img(opt.prompt, - opt.negative_prompt, - opt.cfg_scale, - opt.w, - opt.h, - opt.sample_method, - opt.sample_steps, - opt.seed); - } else { - img = sd.img2img(init_img, - opt.prompt, - opt.negative_prompt, - opt.cfg_scale, - opt.w, - opt.h, - opt.sample_method, - opt.sample_steps, - opt.strength, - opt.seed); - } - - if (img.size() == 0) { - fprintf(stderr, "generate failed\n"); - return 1; - } - - stbi_write_png(opt.output_path.c_str(), opt.w, opt.h, 3, img.data(), 0); - printf("save result image to '%s'\n", opt.output_path.c_str()); - - return 0; -} \ No newline at end of file diff --git a/face_detect.py b/face_detect.py new file mode 100644 index 000000000..7131af31f --- /dev/null +++ b/face_detect.py @@ -0,0 +1,88 @@ +import os +import sys + +import numpy as np +import torch +from diffusers.utils import load_image +# pip install insightface==0.7.3 +from insightface.app import FaceAnalysis +from insightface.data import get_image as ins_get_image +from safetensors.torch import save_file + +### +# https://github.com/cubiq/ComfyUI_IPAdapter_plus/issues/165#issue-2055829543 +### +class FaceAnalysis2(FaceAnalysis): + # NOTE: allows setting det_size for each detection call. 
+ # the model allows it but the wrapping code from insightface + # doesn't show it, and people end up loading duplicate models + # for different sizes where there is absolutely no need to + def get(self, img, max_num=0, det_size=(640, 640)): + if det_size is not None: + self.det_model.input_size = det_size + + return super().get(img, max_num) + +def analyze_faces(face_analysis: FaceAnalysis, img_data: np.ndarray, det_size=(640, 640)): + # NOTE: try detect faces, if no faces detected, lower det_size until it does + detection_sizes = [None] + [(size, size) for size in range(640, 256, -64)] + [(256, 256)] + + for size in detection_sizes: + faces = face_analysis.get(img_data, det_size=size) + if len(faces) > 0: + return faces + + return [] + +if __name__ == "__main__": + #face_detector = FaceAnalysis2(providers=['CUDAExecutionProvider'], allowed_modules=['detection', 'recognition']) + face_detector = FaceAnalysis2(providers=['CPUExecutionProvider'], allowed_modules=['detection', 'recognition']) + face_detector.prepare(ctx_id=0, det_size=(640, 640)) + #input_folder_name = './scarletthead_woman' + input_folder_name = sys.argv[1] + image_basename_list = os.listdir(input_folder_name) + image_path_list = sorted([os.path.join(input_folder_name, basename) for basename in image_basename_list]) + + input_id_images = [] + for image_path in image_path_list: + input_id_images.append(load_image(image_path)) + + id_embed_list = [] + + for img in input_id_images: + img = np.array(img) + img = img[:, :, ::-1] + faces = analyze_faces(face_detector, img) + if len(faces) > 0: + id_embed_list.append(torch.from_numpy((faces[0]['embedding']))) + + if len(id_embed_list) == 0: + raise ValueError(f"No face detected in input image pool") + + id_embeds = torch.stack(id_embed_list) + + # for r in id_embeds: + # print(r) + # #torch.save(id_embeds, input_folder_name+'/id_embeds.pt'); + # weights = dict() + # weights["id_embeds"] = id_embeds + # save_file(weights, 
input_folder_name+'/id_embeds.safetensors') + + binary_data = id_embeds.numpy().tobytes() + two = 4 + zero = 0 + one = 1 + tensor_name = "id_embeds" +# Write binary data to a file + with open(input_folder_name+'/id_embeds.bin', "wb") as f: + f.write(two.to_bytes(4, byteorder='little')) + f.write((len(tensor_name)).to_bytes(4, byteorder='little')) + f.write(zero.to_bytes(4, byteorder='little')) + f.write((id_embeds.shape[1]).to_bytes(4, byteorder='little')) + f.write((id_embeds.shape[0]).to_bytes(4, byteorder='little')) + f.write(one.to_bytes(4, byteorder='little')) + f.write(one.to_bytes(4, byteorder='little')) + f.write(tensor_name.encode('ascii')) + f.write(binary_data) + + \ No newline at end of file diff --git a/flux.hpp b/flux.hpp new file mode 100644 index 000000000..11045918f --- /dev/null +++ b/flux.hpp @@ -0,0 +1,1279 @@ +#ifndef __FLUX_HPP__ +#define __FLUX_HPP__ + +#include + +#include "ggml_extend.hpp" +#include "model.h" + +#define FLUX_GRAPH_SIZE 10240 + +namespace Flux { + + struct MLPEmbedder : public UnaryBlock { + public: + MLPEmbedder(int64_t in_dim, int64_t hidden_dim) { + blocks["in_layer"] = std::shared_ptr(new Linear(in_dim, hidden_dim, true)); + blocks["out_layer"] = std::shared_ptr(new Linear(hidden_dim, hidden_dim, true)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [..., in_dim] + // return: [..., hidden_dim] + auto in_layer = std::dynamic_pointer_cast(blocks["in_layer"]); + auto out_layer = std::dynamic_pointer_cast(blocks["out_layer"]); + + x = in_layer->forward(ctx, x); + x = ggml_silu_inplace(ctx, x); + x = out_layer->forward(ctx, x); + return x; + } + }; + + class RMSNorm : public UnaryBlock { + protected: + int64_t hidden_size; + float eps; + + void init_params(struct ggml_context* ctx, std::map& tensor_types, const std::string prefix = "") { + ggml_type wtype = GGML_TYPE_F32; //(tensor_types.find(prefix + "scale") != tensor_types.end()) ? 
tensor_types[prefix + "scale"] : GGML_TYPE_F32; + params["scale"] = ggml_new_tensor_1d(ctx, wtype, hidden_size); + } + + public: + RMSNorm(int64_t hidden_size, + float eps = 1e-06f) + : hidden_size(hidden_size), + eps(eps) {} + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + struct ggml_tensor* w = params["scale"]; + x = ggml_rms_norm(ctx, x, eps); + x = ggml_mul(ctx, x, w); + return x; + } + }; + + struct QKNorm : public GGMLBlock { + public: + QKNorm(int64_t dim) { + blocks["query_norm"] = std::shared_ptr(new RMSNorm(dim)); + blocks["key_norm"] = std::shared_ptr(new RMSNorm(dim)); + } + + struct ggml_tensor* query_norm(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [..., dim] + // return: [..., dim] + auto norm = std::dynamic_pointer_cast(blocks["query_norm"]); + + x = norm->forward(ctx, x); + return x; + } + + struct ggml_tensor* key_norm(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [..., dim] + // return: [..., dim] + auto norm = std::dynamic_pointer_cast(blocks["key_norm"]); + + x = norm->forward(ctx, x); + return x; + } + }; + + __STATIC_INLINE__ struct ggml_tensor* apply_rope(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* pe) { + // x: [N, L, n_head, d_head] + // pe: [L, d_head/2, 2, 2] + int64_t d_head = x->ne[0]; + int64_t n_head = x->ne[1]; + int64_t L = x->ne[2]; + int64_t N = x->ne[3]; + x = ggml_cont(ctx, ggml_permute(ctx, x, 0, 2, 1, 3)); // [N, n_head, L, d_head] + x = ggml_reshape_4d(ctx, x, 2, d_head / 2, L, n_head * N); // [N * n_head, L, d_head/2, 2] + x = ggml_cont(ctx, ggml_permute(ctx, x, 3, 0, 1, 2)); // [2, N * n_head, L, d_head/2] + + int64_t offset = x->nb[2] * x->ne[2]; + auto x_0 = ggml_view_3d(ctx, x, x->ne[0], x->ne[1], x->ne[2], x->nb[1], x->nb[2], offset * 0); // [N * n_head, L, d_head/2] + auto x_1 = ggml_view_3d(ctx, x, x->ne[0], x->ne[1], x->ne[2], x->nb[1], x->nb[2], offset * 1); // [N * n_head, L, d_head/2] + x_0 = ggml_reshape_4d(ctx, x_0, 1, 
x_0->ne[0], x_0->ne[1], x_0->ne[2]); // [N * n_head, L, d_head/2, 1] + x_1 = ggml_reshape_4d(ctx, x_1, 1, x_1->ne[0], x_1->ne[1], x_1->ne[2]); // [N * n_head, L, d_head/2, 1] + auto temp_x = ggml_new_tensor_4d(ctx, x_0->type, 2, x_0->ne[1], x_0->ne[2], x_0->ne[3]); + x_0 = ggml_repeat(ctx, x_0, temp_x); // [N * n_head, L, d_head/2, 2] + x_1 = ggml_repeat(ctx, x_1, temp_x); // [N * n_head, L, d_head/2, 2] + + pe = ggml_cont(ctx, ggml_permute(ctx, pe, 3, 0, 1, 2)); // [2, L, d_head/2, 2] + offset = pe->nb[2] * pe->ne[2]; + auto pe_0 = ggml_view_3d(ctx, pe, pe->ne[0], pe->ne[1], pe->ne[2], pe->nb[1], pe->nb[2], offset * 0); // [L, d_head/2, 2] + auto pe_1 = ggml_view_3d(ctx, pe, pe->ne[0], pe->ne[1], pe->ne[2], pe->nb[1], pe->nb[2], offset * 1); // [L, d_head/2, 2] + + auto x_out = ggml_add_inplace(ctx, ggml_mul(ctx, x_0, pe_0), ggml_mul(ctx, x_1, pe_1)); // [N * n_head, L, d_head/2, 2] + x_out = ggml_reshape_3d(ctx, x_out, d_head, L, n_head * N); // [N*n_head, L, d_head] + return x_out; + } + + __STATIC_INLINE__ struct ggml_tensor* attention(struct ggml_context* ctx, + struct ggml_tensor* q, + struct ggml_tensor* k, + struct ggml_tensor* v, + struct ggml_tensor* pe, + struct ggml_tensor* mask, + bool flash_attn) { + // q,k,v: [N, L, n_head, d_head] + // pe: [L, d_head/2, 2, 2] + // return: [N, L, n_head*d_head] + q = apply_rope(ctx, q, pe); // [N*n_head, L, d_head] + k = apply_rope(ctx, k, pe); // [N*n_head, L, d_head] + + auto x = ggml_nn_attention_ext(ctx, q, k, v, v->ne[1], mask, false, true, flash_attn); // [N, L, n_head*d_head] + return x; + } + + struct SelfAttention : public GGMLBlock { + public: + int64_t num_heads; + bool flash_attn; + + public: + SelfAttention(int64_t dim, + int64_t num_heads = 8, + bool qkv_bias = false, + bool flash_attn = false) + : num_heads(num_heads) { + int64_t head_dim = dim / num_heads; + blocks["qkv"] = std::shared_ptr(new Linear(dim, dim * 3, qkv_bias)); + blocks["norm"] = std::shared_ptr(new QKNorm(head_dim)); + blocks["proj"] = 
std::shared_ptr(new Linear(dim, dim)); + } + + std::vector pre_attention(struct ggml_context* ctx, struct ggml_tensor* x) { + auto qkv_proj = std::dynamic_pointer_cast(blocks["qkv"]); + auto norm = std::dynamic_pointer_cast(blocks["norm"]); + + auto qkv = qkv_proj->forward(ctx, x); + auto qkv_vec = split_qkv(ctx, qkv); + int64_t head_dim = qkv_vec[0]->ne[0] / num_heads; + auto q = ggml_reshape_4d(ctx, qkv_vec[0], head_dim, num_heads, qkv_vec[0]->ne[1], qkv_vec[0]->ne[2]); + auto k = ggml_reshape_4d(ctx, qkv_vec[1], head_dim, num_heads, qkv_vec[1]->ne[1], qkv_vec[1]->ne[2]); + auto v = ggml_reshape_4d(ctx, qkv_vec[2], head_dim, num_heads, qkv_vec[2]->ne[1], qkv_vec[2]->ne[2]); + q = norm->query_norm(ctx, q); + k = norm->key_norm(ctx, k); + return {q, k, v}; + } + + struct ggml_tensor* post_attention(struct ggml_context* ctx, struct ggml_tensor* x) { + auto proj = std::dynamic_pointer_cast(blocks["proj"]); + + x = proj->forward(ctx, x); // [N, n_token, dim] + return x; + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x, struct ggml_tensor* pe, struct ggml_tensor* mask) { + // x: [N, n_token, dim] + // pe: [n_token, d_head/2, 2, 2] + // return [N, n_token, dim] + auto qkv = pre_attention(ctx, x); // q,k,v: [N, n_token, n_head, d_head] + x = attention(ctx, qkv[0], qkv[1], qkv[2], pe, mask, flash_attn); // [N, n_token, dim] + x = post_attention(ctx, x); // [N, n_token, dim] + return x; + } + }; + + struct ModulationOut { + ggml_tensor* shift = NULL; + ggml_tensor* scale = NULL; + ggml_tensor* gate = NULL; + + ModulationOut(ggml_tensor* shift = NULL, ggml_tensor* scale = NULL, ggml_tensor* gate = NULL) + : shift(shift), scale(scale), gate(gate) {} + + ModulationOut(struct ggml_context* ctx, ggml_tensor* vec, int64_t offset) { + int64_t stride = vec->nb[1] * vec->ne[1]; + shift = ggml_view_2d(ctx, vec, vec->ne[0], vec->ne[1], vec->nb[1], stride * (offset + 0)); // [N, dim] + scale = ggml_view_2d(ctx, vec, vec->ne[0], vec->ne[1], 
vec->nb[1], stride * (offset + 1)); // [N, dim] + gate = ggml_view_2d(ctx, vec, vec->ne[0], vec->ne[1], vec->nb[1], stride * (offset + 2)); // [N, dim] + } + }; + + struct Modulation : public GGMLBlock { + public: + bool is_double; + int multiplier; + + public: + Modulation(int64_t dim, bool is_double) + : is_double(is_double) { + multiplier = is_double ? 6 : 3; + blocks["lin"] = std::shared_ptr(new Linear(dim, dim * multiplier)); + } + + std::vector forward(struct ggml_context* ctx, struct ggml_tensor* vec) { + // x: [N, dim] + // return: [ModulationOut, ModulationOut] + auto lin = std::dynamic_pointer_cast(blocks["lin"]); + + auto out = ggml_silu(ctx, vec); + out = lin->forward(ctx, out); // [N, multiplier*dim] + + auto m = ggml_reshape_3d(ctx, out, vec->ne[0], multiplier, vec->ne[1]); // [N, multiplier, dim] + m = ggml_cont(ctx, ggml_permute(ctx, m, 0, 2, 1, 3)); // [multiplier, N, dim] + + ModulationOut m_0 = ModulationOut(ctx, m, 0); + if (is_double) { + return {m_0, ModulationOut(ctx, m, 3)}; + } + + return {m_0, ModulationOut()}; + } + }; + + __STATIC_INLINE__ struct ggml_tensor* modulate(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* shift, + struct ggml_tensor* scale) { + // x: [N, L, C] + // scale: [N, C] + // shift: [N, C] + scale = ggml_reshape_3d(ctx, scale, scale->ne[0], 1, scale->ne[1]); // [N, 1, C] + shift = ggml_reshape_3d(ctx, shift, shift->ne[0], 1, shift->ne[1]); // [N, 1, C] + x = ggml_add(ctx, x, ggml_mul(ctx, x, scale)); + x = ggml_add(ctx, x, shift); + return x; + } + + struct DoubleStreamBlock : public GGMLBlock { + bool flash_attn; + bool prune_mod; + int idx = 0; + + public: + DoubleStreamBlock(int64_t hidden_size, + int64_t num_heads, + float mlp_ratio, + int idx = 0, + bool qkv_bias = false, + bool flash_attn = false, + bool prune_mod = false) + : idx(idx), flash_attn(flash_attn), prune_mod(prune_mod) { + int64_t mlp_hidden_dim = hidden_size * mlp_ratio; + if (!prune_mod) { + blocks["img_mod"] = 
std::shared_ptr(new Modulation(hidden_size, true)); + } + blocks["img_norm1"] = std::shared_ptr(new LayerNorm(hidden_size, 1e-6f, false)); + blocks["img_attn"] = std::shared_ptr(new SelfAttention(hidden_size, num_heads, qkv_bias, flash_attn)); + + blocks["img_norm2"] = std::shared_ptr(new LayerNorm(hidden_size, 1e-6f, false)); + blocks["img_mlp.0"] = std::shared_ptr(new Linear(hidden_size, mlp_hidden_dim)); + // img_mlp.1 is nn.GELU(approximate="tanh") + blocks["img_mlp.2"] = std::shared_ptr(new Linear(mlp_hidden_dim, hidden_size)); + + if (!prune_mod) { + blocks["txt_mod"] = std::shared_ptr(new Modulation(hidden_size, true)); + } + blocks["txt_norm1"] = std::shared_ptr(new LayerNorm(hidden_size, 1e-6f, false)); + blocks["txt_attn"] = std::shared_ptr(new SelfAttention(hidden_size, num_heads, qkv_bias, flash_attn)); + + blocks["txt_norm2"] = std::shared_ptr(new LayerNorm(hidden_size, 1e-6f, false)); + blocks["txt_mlp.0"] = std::shared_ptr(new Linear(hidden_size, mlp_hidden_dim)); + // img_mlp.1 is nn.GELU(approximate="tanh") + blocks["txt_mlp.2"] = std::shared_ptr(new Linear(mlp_hidden_dim, hidden_size)); + } + + std::vector get_distil_img_mod(struct ggml_context* ctx, struct ggml_tensor* vec) { + // TODO: not hardcoded? + const int single_blocks_count = 38; + const int double_blocks_count = 19; + + int64_t offset = 6 * idx + 3 * single_blocks_count; + return {ModulationOut(ctx, vec, offset), ModulationOut(ctx, vec, offset + 3)}; + } + + std::vector get_distil_txt_mod(struct ggml_context* ctx, struct ggml_tensor* vec) { + // TODO: not hardcoded? 
+ const int single_blocks_count = 38; + const int double_blocks_count = 19; + + int64_t offset = 6 * idx + 6 * double_blocks_count + 3 * single_blocks_count; + return {ModulationOut(ctx, vec, offset), ModulationOut(ctx, vec, offset + 3)}; + } + + std::pair forward(struct ggml_context* ctx, + struct ggml_tensor* img, + struct ggml_tensor* txt, + struct ggml_tensor* vec, + struct ggml_tensor* pe, + struct ggml_tensor* mask = NULL) { + // img: [N, n_img_token, hidden_size] + // txt: [N, n_txt_token, hidden_size] + // pe: [n_img_token + n_txt_token, d_head/2, 2, 2] + // return: ([N, n_img_token, hidden_size], [N, n_txt_token, hidden_size]) + auto img_norm1 = std::dynamic_pointer_cast(blocks["img_norm1"]); + auto img_attn = std::dynamic_pointer_cast(blocks["img_attn"]); + + auto img_norm2 = std::dynamic_pointer_cast(blocks["img_norm2"]); + auto img_mlp_0 = std::dynamic_pointer_cast(blocks["img_mlp.0"]); + auto img_mlp_2 = std::dynamic_pointer_cast(blocks["img_mlp.2"]); + + auto txt_norm1 = std::dynamic_pointer_cast(blocks["txt_norm1"]); + auto txt_attn = std::dynamic_pointer_cast(blocks["txt_attn"]); + + auto txt_norm2 = std::dynamic_pointer_cast(blocks["txt_norm2"]); + auto txt_mlp_0 = std::dynamic_pointer_cast(blocks["txt_mlp.0"]); + auto txt_mlp_2 = std::dynamic_pointer_cast(blocks["txt_mlp.2"]); + + std::vector img_mods; + if (prune_mod) { + img_mods = get_distil_img_mod(ctx, vec); + } else { + auto img_mod = std::dynamic_pointer_cast(blocks["img_mod"]); + img_mods = img_mod->forward(ctx, vec); + } + ModulationOut img_mod1 = img_mods[0]; + ModulationOut img_mod2 = img_mods[1]; + std::vector txt_mods; + if (prune_mod) { + txt_mods = get_distil_txt_mod(ctx, vec); + } else { + auto txt_mod = std::dynamic_pointer_cast(blocks["txt_mod"]); + txt_mods = txt_mod->forward(ctx, vec); + } + ModulationOut txt_mod1 = txt_mods[0]; + ModulationOut txt_mod2 = txt_mods[1]; + + // prepare image for attention + auto img_modulated = img_norm1->forward(ctx, img); + img_modulated = 
Flux::modulate(ctx, img_modulated, img_mod1.shift, img_mod1.scale); + auto img_qkv = img_attn->pre_attention(ctx, img_modulated); // q,k,v: [N, n_img_token, n_head, d_head] + auto img_q = img_qkv[0]; + auto img_k = img_qkv[1]; + auto img_v = img_qkv[2]; + + // prepare txt for attention + auto txt_modulated = txt_norm1->forward(ctx, txt); + txt_modulated = Flux::modulate(ctx, txt_modulated, txt_mod1.shift, txt_mod1.scale); + auto txt_qkv = txt_attn->pre_attention(ctx, txt_modulated); // q,k,v: [N, n_txt_token, n_head, d_head] + auto txt_q = txt_qkv[0]; + auto txt_k = txt_qkv[1]; + auto txt_v = txt_qkv[2]; + + // run actual attention + auto q = ggml_concat(ctx, txt_q, img_q, 2); // [N, n_txt_token + n_img_token, n_head, d_head] + auto k = ggml_concat(ctx, txt_k, img_k, 2); // [N, n_txt_token + n_img_token, n_head, d_head] + auto v = ggml_concat(ctx, txt_v, img_v, 2); // [N, n_txt_token + n_img_token, n_head, d_head] + + auto attn = attention(ctx, q, k, v, pe, mask, flash_attn); // [N, n_txt_token + n_img_token, n_head*d_head] + attn = ggml_cont(ctx, ggml_permute(ctx, attn, 0, 2, 1, 3)); // [n_txt_token + n_img_token, N, hidden_size] + auto txt_attn_out = ggml_view_3d(ctx, + attn, + attn->ne[0], + attn->ne[1], + txt->ne[1], + attn->nb[1], + attn->nb[2], + 0); // [n_txt_token, N, hidden_size] + txt_attn_out = ggml_cont(ctx, ggml_permute(ctx, txt_attn_out, 0, 2, 1, 3)); // [N, n_txt_token, hidden_size] + auto img_attn_out = ggml_view_3d(ctx, + attn, + attn->ne[0], + attn->ne[1], + img->ne[1], + attn->nb[1], + attn->nb[2], + attn->nb[2] * txt->ne[1]); // [n_img_token, N, hidden_size] + img_attn_out = ggml_cont(ctx, ggml_permute(ctx, img_attn_out, 0, 2, 1, 3)); // [N, n_img_token, hidden_size] + + // calculate the img bloks + img = ggml_add(ctx, img, ggml_mul(ctx, img_attn->post_attention(ctx, img_attn_out), img_mod1.gate)); + + auto img_mlp_out = img_mlp_0->forward(ctx, Flux::modulate(ctx, img_norm2->forward(ctx, img), img_mod2.shift, img_mod2.scale)); + img_mlp_out = 
ggml_gelu_inplace(ctx, img_mlp_out); + img_mlp_out = img_mlp_2->forward(ctx, img_mlp_out); + + img = ggml_add(ctx, img, ggml_mul(ctx, img_mlp_out, img_mod2.gate)); + + // calculate the txt bloks + txt = ggml_add(ctx, txt, ggml_mul(ctx, txt_attn->post_attention(ctx, txt_attn_out), txt_mod1.gate)); + + auto txt_mlp_out = txt_mlp_0->forward(ctx, Flux::modulate(ctx, txt_norm2->forward(ctx, txt), txt_mod2.shift, txt_mod2.scale)); + txt_mlp_out = ggml_gelu_inplace(ctx, txt_mlp_out); + txt_mlp_out = txt_mlp_2->forward(ctx, txt_mlp_out); + + txt = ggml_add(ctx, txt, ggml_mul(ctx, txt_mlp_out, txt_mod2.gate)); + + return {img, txt}; + } + }; + + struct SingleStreamBlock : public GGMLBlock { + public: + int64_t num_heads; + int64_t hidden_size; + int64_t mlp_hidden_dim; + bool flash_attn; + bool prune_mod; + int idx = 0; + + public: + SingleStreamBlock(int64_t hidden_size, + int64_t num_heads, + float mlp_ratio = 4.0f, + int idx = 0, + float qk_scale = 0.f, + bool flash_attn = false, + bool prune_mod = false) + : hidden_size(hidden_size), num_heads(num_heads), idx(idx), flash_attn(flash_attn), prune_mod(prune_mod) { + int64_t head_dim = hidden_size / num_heads; + float scale = qk_scale; + if (scale <= 0.f) { + scale = 1 / sqrt((float)head_dim); + } + mlp_hidden_dim = hidden_size * mlp_ratio; + + blocks["linear1"] = std::shared_ptr(new Linear(hidden_size, hidden_size * 3 + mlp_hidden_dim)); + blocks["linear2"] = std::shared_ptr(new Linear(hidden_size + mlp_hidden_dim, hidden_size)); + blocks["norm"] = std::shared_ptr(new QKNorm(head_dim)); + blocks["pre_norm"] = std::shared_ptr(new LayerNorm(hidden_size, 1e-6f, false)); + // mlp_act is nn.GELU(approximate="tanh") + if (!prune_mod) { + blocks["modulation"] = std::shared_ptr(new Modulation(hidden_size, false)); + } + } + + ModulationOut get_distil_mod(struct ggml_context* ctx, struct ggml_tensor* vec) { + int64_t offset = 3 * idx; + return ModulationOut(ctx, vec, offset); + } + + struct ggml_tensor* forward(struct ggml_context* 
ctx, + struct ggml_tensor* x, + struct ggml_tensor* vec, + struct ggml_tensor* pe, + struct ggml_tensor* mask = NULL) { + // x: [N, n_token, hidden_size] + // pe: [n_token, d_head/2, 2, 2] + // return: [N, n_token, hidden_size] + + auto linear1 = std::dynamic_pointer_cast(blocks["linear1"]); + auto linear2 = std::dynamic_pointer_cast(blocks["linear2"]); + auto norm = std::dynamic_pointer_cast(blocks["norm"]); + auto pre_norm = std::dynamic_pointer_cast(blocks["pre_norm"]); + ModulationOut mod; + if (prune_mod) { + mod = get_distil_mod(ctx, vec); + } else { + auto modulation = std::dynamic_pointer_cast(blocks["modulation"]); + + mod = modulation->forward(ctx, vec)[0]; + } + auto x_mod = Flux::modulate(ctx, pre_norm->forward(ctx, x), mod.shift, mod.scale); + auto qkv_mlp = linear1->forward(ctx, x_mod); // [N, n_token, hidden_size * 3 + mlp_hidden_dim] + qkv_mlp = ggml_cont(ctx, ggml_permute(ctx, qkv_mlp, 2, 0, 1, 3)); // [hidden_size * 3 + mlp_hidden_dim, N, n_token] + + auto qkv = ggml_view_3d(ctx, + qkv_mlp, + qkv_mlp->ne[0], + qkv_mlp->ne[1], + hidden_size * 3, + qkv_mlp->nb[1], + qkv_mlp->nb[2], + 0); // [hidden_size * 3 , N, n_token] + qkv = ggml_cont(ctx, ggml_permute(ctx, qkv, 1, 2, 0, 3)); // [N, n_token, hidden_size * 3] + auto mlp = ggml_view_3d(ctx, + qkv_mlp, + qkv_mlp->ne[0], + qkv_mlp->ne[1], + mlp_hidden_dim, + qkv_mlp->nb[1], + qkv_mlp->nb[2], + qkv_mlp->nb[2] * hidden_size * 3); // [mlp_hidden_dim , N, n_token] + mlp = ggml_cont(ctx, ggml_permute(ctx, mlp, 1, 2, 0, 3)); // [N, n_token, mlp_hidden_dim] + + auto qkv_vec = split_qkv(ctx, qkv); // q,k,v: [N, n_token, hidden_size] + int64_t head_dim = hidden_size / num_heads; + auto q = ggml_reshape_4d(ctx, qkv_vec[0], head_dim, num_heads, qkv_vec[0]->ne[1], qkv_vec[0]->ne[2]); // [N, n_token, n_head, d_head] + auto k = ggml_reshape_4d(ctx, qkv_vec[1], head_dim, num_heads, qkv_vec[1]->ne[1], qkv_vec[1]->ne[2]); // [N, n_token, n_head, d_head] + auto v = ggml_reshape_4d(ctx, qkv_vec[2], head_dim, 
num_heads, qkv_vec[2]->ne[1], qkv_vec[2]->ne[2]); // [N, n_token, n_head, d_head] + q = norm->query_norm(ctx, q); + k = norm->key_norm(ctx, k); + auto attn = attention(ctx, q, k, v, pe, mask, flash_attn); // [N, n_token, hidden_size] + + auto attn_mlp = ggml_concat(ctx, attn, ggml_gelu_inplace(ctx, mlp), 0); // [N, n_token, hidden_size + mlp_hidden_dim] + auto output = linear2->forward(ctx, attn_mlp); // [N, n_token, hidden_size] + + output = ggml_add(ctx, x, ggml_mul(ctx, output, mod.gate)); + return output; + } + }; + + struct LastLayer : public GGMLBlock { + bool prune_mod; + + public: + LastLayer(int64_t hidden_size, + int64_t patch_size, + int64_t out_channels, + bool prune_mod = false) + : prune_mod(prune_mod) { + blocks["norm_final"] = std::shared_ptr(new LayerNorm(hidden_size, 1e-06f, false)); + blocks["linear"] = std::shared_ptr(new Linear(hidden_size, patch_size * patch_size * out_channels)); + if (!prune_mod) { + blocks["adaLN_modulation.1"] = std::shared_ptr(new Linear(hidden_size, 2 * hidden_size)); + } + } + + ModulationOut get_distil_mod(struct ggml_context* ctx, struct ggml_tensor* vec) { + int64_t offset = vec->ne[2] - 2; + int64_t stride = vec->nb[1] * vec->ne[1]; + auto shift = ggml_view_2d(ctx, vec, vec->ne[0], vec->ne[1], vec->nb[1], stride * (offset + 0)); // [N, dim] + auto scale = ggml_view_2d(ctx, vec, vec->ne[0], vec->ne[1], vec->nb[1], stride * (offset + 1)); // [N, dim] + // No gate + return ModulationOut(shift, scale, NULL); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* c) { + // x: [N, n_token, hidden_size] + // c: [N, hidden_size] + // return: [N, n_token, patch_size * patch_size * out_channels] + auto norm_final = std::dynamic_pointer_cast(blocks["norm_final"]); + auto linear = std::dynamic_pointer_cast(blocks["linear"]); + struct ggml_tensor *shift, *scale; + if (prune_mod) { + auto mod = get_distil_mod(ctx, c); + shift = mod.shift; + scale = mod.scale; + } else { + auto 
adaLN_modulation_1 = std::dynamic_pointer_cast(blocks["adaLN_modulation.1"]); + + auto m = adaLN_modulation_1->forward(ctx, ggml_silu(ctx, c)); // [N, 2 * hidden_size] + m = ggml_reshape_3d(ctx, m, c->ne[0], 2, c->ne[1]); // [N, 2, hidden_size] + m = ggml_cont(ctx, ggml_permute(ctx, m, 0, 2, 1, 3)); // [2, N, hidden_size] + + int64_t offset = m->nb[1] * m->ne[1]; + shift = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 0); // [N, hidden_size] + scale = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 1); // [N, hidden_size] + } + + x = Flux::modulate(ctx, norm_final->forward(ctx, x), shift, scale); + x = linear->forward(ctx, x); + + return x; + } + }; + + struct ChromaApproximator : public GGMLBlock { + int64_t inner_size = 5120; + int64_t n_layers = 5; + ChromaApproximator(int64_t in_channels = 64, int64_t hidden_size = 3072) { + blocks["in_proj"] = std::shared_ptr(new Linear(in_channels, inner_size, true)); + for (int i = 0; i < n_layers; i++) { + blocks["norms." + std::to_string(i)] = std::shared_ptr(new RMSNorm(inner_size)); + blocks["layers." + std::to_string(i)] = std::shared_ptr(new MLPEmbedder(inner_size, inner_size)); + } + blocks["out_proj"] = std::shared_ptr(new Linear(inner_size, hidden_size, true)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + auto in_proj = std::dynamic_pointer_cast(blocks["in_proj"]); + auto out_proj = std::dynamic_pointer_cast(blocks["out_proj"]); + + x = in_proj->forward(ctx, x); + for (int i = 0; i < n_layers; i++) { + auto norm = std::dynamic_pointer_cast(blocks["norms." + std::to_string(i)]); + auto embed = std::dynamic_pointer_cast(blocks["layers." 
+ std::to_string(i)]); + x = ggml_add_inplace(ctx, x, embed->forward(ctx, norm->forward(ctx, x))); + } + x = out_proj->forward(ctx, x); + + return x; + } + }; + + struct FluxParams { + int64_t in_channels = 64; + int64_t out_channels = 64; + int64_t vec_in_dim = 768; + int64_t context_in_dim = 4096; + int64_t hidden_size = 3072; + float mlp_ratio = 4.0f; + int64_t num_heads = 24; + int64_t depth = 19; + int64_t depth_single_blocks = 38; + std::vector axes_dim = {16, 56, 56}; + int64_t axes_dim_sum = 128; + int theta = 10000; + bool qkv_bias = true; + bool guidance_embed = true; + bool flash_attn = true; + bool is_chroma = false; + }; + + struct Flux : public GGMLBlock { + public: + std::vector linspace(float start, float end, int num) { + std::vector result(num); + float step = (end - start) / (num - 1); + for (int i = 0; i < num; ++i) { + result[i] = start + i * step; + } + return result; + } + + std::vector> transpose(const std::vector>& mat) { + int rows = mat.size(); + int cols = mat[0].size(); + std::vector> transposed(cols, std::vector(rows)); + for (int i = 0; i < rows; ++i) { + for (int j = 0; j < cols; ++j) { + transposed[j][i] = mat[i][j]; + } + } + return transposed; + } + + std::vector flatten(const std::vector>& vec) { + std::vector flat_vec; + for (const auto& sub_vec : vec) { + flat_vec.insert(flat_vec.end(), sub_vec.begin(), sub_vec.end()); + } + return flat_vec; + } + + std::vector> rope(const std::vector& pos, int dim, int theta) { + assert(dim % 2 == 0); + int half_dim = dim / 2; + + std::vector scale = linspace(0, (dim * 1.0f - 2) / dim, half_dim); + + std::vector omega(half_dim); + for (int i = 0; i < half_dim; ++i) { + omega[i] = 1.0 / std::pow(theta, scale[i]); + } + + int pos_size = pos.size(); + std::vector> out(pos_size, std::vector(half_dim)); + for (int i = 0; i < pos_size; ++i) { + for (int j = 0; j < half_dim; ++j) { + out[i][j] = pos[i] * omega[j]; + } + } + + std::vector> result(pos_size, std::vector(half_dim * 4)); + for (int i = 0; 
i < pos_size; ++i) { + for (int j = 0; j < half_dim; ++j) { + result[i][4 * j] = std::cos(out[i][j]); + result[i][4 * j + 1] = -std::sin(out[i][j]); + result[i][4 * j + 2] = std::sin(out[i][j]); + result[i][4 * j + 3] = std::cos(out[i][j]); + } + } + + return result; + } + + // Generate IDs for image patches and text + std::vector> gen_txt_ids(int bs, int context_len) { + return std::vector>(bs * context_len, std::vector(3, 0.0)); + } + + std::vector> gen_img_ids(int h, int w, int patch_size, int bs, int index = 0, int h_offset = 0, int w_offset = 0) { + int h_len = (h + (patch_size / 2)) / patch_size; + int w_len = (w + (patch_size / 2)) / patch_size; + + std::vector> img_ids(h_len * w_len, std::vector(3, 0.0)); + + std::vector row_ids = linspace(h_offset, h_len - 1 + h_offset, h_len); + std::vector col_ids = linspace(w_offset, w_len - 1 + w_offset, w_len); + + for (int i = 0; i < h_len; ++i) { + for (int j = 0; j < w_len; ++j) { + img_ids[i * w_len + j][0] = index; + img_ids[i * w_len + j][1] = row_ids[i]; + img_ids[i * w_len + j][2] = col_ids[j]; + } + } + + std::vector> img_ids_repeated(bs * img_ids.size(), std::vector(3)); + for (int i = 0; i < bs; ++i) { + for (int j = 0; j < img_ids.size(); ++j) { + img_ids_repeated[i * img_ids.size() + j] = img_ids[j]; + } + } + return img_ids_repeated; + } + + std::vector> concat_ids(const std::vector>& a, + const std::vector>& b, + int bs) { + size_t a_len = a.size() / bs; + size_t b_len = b.size() / bs; + std::vector> ids(a.size() + b.size(), std::vector(3)); + for (int i = 0; i < bs; ++i) { + for (int j = 0; j < a_len; ++j) { + ids[i * (a_len + b_len) + j] = a[i * a_len + j]; + } + for (int j = 0; j < b_len; ++j) { + ids[i * (a_len + b_len) + a_len + j] = b[i * b_len + j]; + } + } + return ids; + } + + std::vector> gen_ids(int h, int w, int patch_size, int bs, int context_len, std::vector ref_latents) { + auto txt_ids = gen_txt_ids(bs, context_len); + auto img_ids = gen_img_ids(h, w, patch_size, bs); + + auto ids = 
concat_ids(txt_ids, img_ids, bs); + uint64_t curr_h_offset = 0; + uint64_t curr_w_offset = 0; + for (ggml_tensor* ref : ref_latents) { + uint64_t h_offset = 0; + uint64_t w_offset = 0; + if (ref->ne[1] + curr_h_offset > ref->ne[0] + curr_w_offset) { + w_offset = curr_w_offset; + } else { + h_offset = curr_h_offset; + } + + auto ref_ids = gen_img_ids(ref->ne[1], ref->ne[0], patch_size, bs, 1, h_offset, w_offset); + ids = concat_ids(ids, ref_ids, bs); + + curr_h_offset = std::max(curr_h_offset, ref->ne[1] + h_offset); + curr_w_offset = std::max(curr_w_offset, ref->ne[0] + w_offset); + } + return ids; + } + + // Generate positional embeddings + std::vector gen_pe(int h, int w, int patch_size, int bs, int context_len, std::vector ref_latents, int theta, const std::vector& axes_dim) { + std::vector> ids = gen_ids(h, w, patch_size, bs, context_len, ref_latents); + std::vector> trans_ids = transpose(ids); + size_t pos_len = ids.size(); + int num_axes = axes_dim.size(); + for (int i = 0; i < pos_len; i++) { + // std::cout << trans_ids[0][i] << " " << trans_ids[1][i] << " " << trans_ids[2][i] << std::endl; + } + + int emb_dim = 0; + for (int d : axes_dim) + emb_dim += d / 2; + + std::vector> emb(bs * pos_len, std::vector(emb_dim * 2 * 2, 0.0)); + int offset = 0; + for (int i = 0; i < num_axes; ++i) { + std::vector> rope_emb = rope(trans_ids[i], axes_dim[i], theta); // [bs*pos_len, axes_dim[i]/2 * 2 * 2] + for (int b = 0; b < bs; ++b) { + for (int j = 0; j < pos_len; ++j) { + for (int k = 0; k < rope_emb[0].size(); ++k) { + emb[b * pos_len + j][offset + k] = rope_emb[j][k]; + } + } + } + offset += rope_emb[0].size(); + } + + return flatten(emb); + } + + public: + FluxParams params; + Flux() {} + Flux(FluxParams params) + : params(params) { + int64_t pe_dim = params.hidden_size / params.num_heads; + + blocks["img_in"] = std::shared_ptr(new Linear(params.in_channels, params.hidden_size, true)); + if (params.is_chroma) { + blocks["distilled_guidance_layer"] = 
std::shared_ptr(new ChromaApproximator(params.in_channels, params.hidden_size)); + } else { + blocks["time_in"] = std::shared_ptr(new MLPEmbedder(256, params.hidden_size)); + blocks["vector_in"] = std::shared_ptr(new MLPEmbedder(params.vec_in_dim, params.hidden_size)); + if (params.guidance_embed) { + blocks["guidance_in"] = std::shared_ptr(new MLPEmbedder(256, params.hidden_size)); + } + } + blocks["txt_in"] = std::shared_ptr(new Linear(params.context_in_dim, params.hidden_size, true)); + + for (int i = 0; i < params.depth; i++) { + blocks["double_blocks." + std::to_string(i)] = std::shared_ptr(new DoubleStreamBlock(params.hidden_size, + params.num_heads, + params.mlp_ratio, + i, + params.qkv_bias, + params.flash_attn, + params.is_chroma)); + } + + for (int i = 0; i < params.depth_single_blocks; i++) { + blocks["single_blocks." + std::to_string(i)] = std::shared_ptr(new SingleStreamBlock(params.hidden_size, + params.num_heads, + params.mlp_ratio, + i, + 0.f, + params.flash_attn, + params.is_chroma)); + } + + blocks["final_layer"] = std::shared_ptr(new LastLayer(params.hidden_size, 1, params.out_channels, params.is_chroma)); + } + + struct ggml_tensor* patchify(struct ggml_context* ctx, + struct ggml_tensor* x, + int64_t patch_size) { + // x: [N, C, H, W] + // return: [N, h*w, C * patch_size * patch_size] + int64_t N = x->ne[3]; + int64_t C = x->ne[2]; + int64_t H = x->ne[1]; + int64_t W = x->ne[0]; + int64_t p = patch_size; + int64_t h = H / patch_size; + int64_t w = W / patch_size; + + GGML_ASSERT(h * p == H && w * p == W); + + x = ggml_reshape_4d(ctx, x, p, w, p, h * C * N); // [N*C*h, p, w, p] + x = ggml_cont(ctx, ggml_permute(ctx, x, 0, 2, 1, 3)); // [N*C*h, w, p, p] + x = ggml_reshape_4d(ctx, x, p * p, w * h, C, N); // [N, C, h*w, p*p] + x = ggml_cont(ctx, ggml_permute(ctx, x, 0, 2, 1, 3)); // [N, h*w, C, p*p] + x = ggml_reshape_3d(ctx, x, p * p * C, w * h, N); // [N, h*w, C*p*p] + return x; + } + + struct ggml_tensor* unpatchify(struct ggml_context* ctx, + 
struct ggml_tensor* x, + int64_t h, + int64_t w, + int64_t patch_size) { + // x: [N, h*w, C*patch_size*patch_size] + // return: [N, C, H, W] + int64_t N = x->ne[2]; + int64_t C = x->ne[0] / patch_size / patch_size; + int64_t H = h * patch_size; + int64_t W = w * patch_size; + int64_t p = patch_size; + + GGML_ASSERT(C * p * p == x->ne[0]); + + x = ggml_reshape_4d(ctx, x, p * p, C, w * h, N); // [N, h*w, C, p*p] + x = ggml_cont(ctx, ggml_permute(ctx, x, 0, 2, 1, 3)); // [N, C, h*w, p*p] + x = ggml_reshape_4d(ctx, x, p, p, w, h * C * N); // [N*C*h, w, p, p] + x = ggml_cont(ctx, ggml_permute(ctx, x, 0, 2, 1, 3)); // [N*C*h, p, w, p] + x = ggml_reshape_4d(ctx, x, W, H, C, N); // [N, C, h*p, w*p] + + return x; + } + + struct ggml_tensor* forward_orig(struct ggml_context* ctx, + struct ggml_tensor* img, + struct ggml_tensor* txt, + struct ggml_tensor* timesteps, + struct ggml_tensor* y, + struct ggml_tensor* guidance, + struct ggml_tensor* pe, + struct ggml_tensor* mod_index_arange = NULL, + std::vector skip_layers = {}) { + auto img_in = std::dynamic_pointer_cast(blocks["img_in"]); + auto txt_in = std::dynamic_pointer_cast(blocks["txt_in"]); + auto final_layer = std::dynamic_pointer_cast(blocks["final_layer"]); + + img = img_in->forward(ctx, img); + struct ggml_tensor* vec; + struct ggml_tensor* txt_img_mask = NULL; + if (params.is_chroma) { + int64_t mod_index_length = 344; + auto approx = std::dynamic_pointer_cast(blocks["distilled_guidance_layer"]); + auto distill_timestep = ggml_nn_timestep_embedding(ctx, timesteps, 16, 10000, 1000.f); + auto distill_guidance = ggml_nn_timestep_embedding(ctx, guidance, 16, 10000, 1000.f); + + // auto mod_index_arange = ggml_arange(ctx, 0, (float)mod_index_length, 1); + // ggml_arange tot working on a lot of backends, precomputing it on CPU instead + GGML_ASSERT(arange != NULL); + auto modulation_index = ggml_nn_timestep_embedding(ctx, mod_index_arange, 32, 10000, 1000.f); // [1, 344, 32] + + // Batch broadcast (will it ever be 
useful) + modulation_index = ggml_repeat(ctx, modulation_index, ggml_new_tensor_3d(ctx, GGML_TYPE_F32, modulation_index->ne[0], modulation_index->ne[1], img->ne[2])); // [N, 344, 32] + + auto timestep_guidance = ggml_concat(ctx, distill_timestep, distill_guidance, 0); // [N, 1, 32] + timestep_guidance = ggml_repeat(ctx, timestep_guidance, modulation_index); // [N, 344, 32] + + vec = ggml_concat(ctx, timestep_guidance, modulation_index, 0); // [N, 344, 64] + // Permute for consistency with non-distilled modulation implementation + vec = ggml_cont(ctx, ggml_permute(ctx, vec, 0, 2, 1, 3)); // [344, N, 64] + vec = approx->forward(ctx, vec); // [344, N, hidden_size] + + if (y != NULL) { + txt_img_mask = ggml_pad(ctx, y, img->ne[1], 0, 0, 0); + } + } else { + auto time_in = std::dynamic_pointer_cast(blocks["time_in"]); + auto vector_in = std::dynamic_pointer_cast(blocks["vector_in"]); + vec = time_in->forward(ctx, ggml_nn_timestep_embedding(ctx, timesteps, 256, 10000, 1000.f)); + if (params.guidance_embed) { + GGML_ASSERT(guidance != NULL); + auto guidance_in = std::dynamic_pointer_cast(blocks["guidance_in"]); + // bf16 and fp16 result is different + auto g_in = ggml_nn_timestep_embedding(ctx, guidance, 256, 10000, 1000.f); + vec = ggml_add(ctx, vec, guidance_in->forward(ctx, g_in)); + } + + vec = ggml_add(ctx, vec, vector_in->forward(ctx, y)); + } + + txt = txt_in->forward(ctx, txt); + + for (int i = 0; i < params.depth; i++) { + if (skip_layers.size() > 0 && std::find(skip_layers.begin(), skip_layers.end(), i) != skip_layers.end()) { + continue; + } + + auto block = std::dynamic_pointer_cast(blocks["double_blocks." 
+ std::to_string(i)]); + + auto img_txt = block->forward(ctx, img, txt, vec, pe, txt_img_mask); + img = img_txt.first; // [N, n_img_token, hidden_size] + txt = img_txt.second; // [N, n_txt_token, hidden_size] + } + + auto txt_img = ggml_concat(ctx, txt, img, 1); // [N, n_txt_token + n_img_token, hidden_size] + for (int i = 0; i < params.depth_single_blocks; i++) { + if (skip_layers.size() > 0 && std::find(skip_layers.begin(), skip_layers.end(), i + params.depth) != skip_layers.end()) { + continue; + } + auto block = std::dynamic_pointer_cast(blocks["single_blocks." + std::to_string(i)]); + + txt_img = block->forward(ctx, txt_img, vec, pe, txt_img_mask); + } + + txt_img = ggml_cont(ctx, ggml_permute(ctx, txt_img, 0, 2, 1, 3)); // [n_txt_token + n_img_token, N, hidden_size] + img = ggml_view_3d(ctx, + txt_img, + txt_img->ne[0], + txt_img->ne[1], + img->ne[1], + txt_img->nb[1], + txt_img->nb[2], + txt_img->nb[2] * txt->ne[1]); // [n_img_token, N, hidden_size] + img = ggml_cont(ctx, ggml_permute(ctx, img, 0, 2, 1, 3)); // [N, n_img_token, hidden_size] + + img = final_layer->forward(ctx, img, vec); // (N, T, patch_size ** 2 * out_channels) + return img; + } + + struct ggml_tensor* process_img(struct ggml_context* ctx, + struct ggml_tensor* x) { + int64_t W = x->ne[0]; + int64_t H = x->ne[1]; + int64_t patch_size = 2; + int pad_h = (patch_size - H % patch_size) % patch_size; + int pad_w = (patch_size - W % patch_size) % patch_size; + x = ggml_pad(ctx, x, pad_w, pad_h, 0, 0); // [N, C, H + pad_h, W + pad_w] + + // img = rearrange(x, "b c (h ph) (w pw) -> b (h w) (c ph pw)", ph=patch_size, pw=patch_size) + auto img = patchify(ctx, x, patch_size); // [N, h*w, C * patch_size * patch_size] + return img; + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* timestep, + struct ggml_tensor* context, + struct ggml_tensor* c_concat, + struct ggml_tensor* y, + struct ggml_tensor* guidance, + struct ggml_tensor* pe, + struct 
ggml_tensor* mod_index_arange = NULL, + std::vector ref_latents = {}, + std::vector skip_layers = {}) { + // Forward pass of DiT. + // x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + // timestep: (N,) tensor of diffusion timesteps + // context: (N, L, D) + // c_concat: NULL, or for (N,C+M, H, W) for Fill + // y: (N, adm_in_channels) tensor of class labels + // guidance: (N,) + // pe: (L, d_head/2, 2, 2) + // return: (N, C, H, W) + + GGML_ASSERT(x->ne[3] == 1); + + int64_t W = x->ne[0]; + int64_t H = x->ne[1]; + int64_t C = x->ne[2]; + int64_t patch_size = 2; + int pad_h = (patch_size - H % patch_size) % patch_size; + int pad_w = (patch_size - W % patch_size) % patch_size; + + auto img = process_img(ctx, x); + uint64_t img_tokens = img->ne[1]; + + if (c_concat != NULL) { + ggml_tensor* masked = ggml_view_4d(ctx, c_concat, c_concat->ne[0], c_concat->ne[1], C, 1, c_concat->nb[1], c_concat->nb[2], c_concat->nb[3], 0); + ggml_tensor* mask = ggml_view_4d(ctx, c_concat, c_concat->ne[0], c_concat->ne[1], 8 * 8, 1, c_concat->nb[1], c_concat->nb[2], c_concat->nb[3], c_concat->nb[2] * C); + + masked = process_img(ctx, masked); + mask = process_img(ctx, mask); + + img = ggml_concat(ctx, img, ggml_concat(ctx, masked, mask, 0), 0); + } + + if (ref_latents.size() > 0) { + for (ggml_tensor* ref : ref_latents) { + ref = process_img(ctx, ref); + img = ggml_concat(ctx, img, ref, 1); + } + } + + auto out = forward_orig(ctx, img, context, timestep, y, guidance, pe, mod_index_arange, skip_layers); // [N, num_tokens, C * patch_size * patch_size] + if (out->ne[1] > img_tokens) { + out = ggml_cont(ctx, ggml_permute(ctx, out, 0, 2, 1, 3)); // [num_tokens, N, C * patch_size * patch_size] + out = ggml_view_3d(ctx, out, out->ne[0], out->ne[1], img_tokens, out->nb[1], out->nb[2], 0); + out = ggml_cont(ctx, ggml_permute(ctx, out, 0, 2, 1, 3)); // [N, h*w, C * patch_size * patch_size] + } + + // rearrange(out, "b (h w) (c ph pw) -> b c (h ph) (w pw)", 
h=h_len, w=w_len, ph=2, pw=2) + out = unpatchify(ctx, out, (H + pad_h) / patch_size, (W + pad_w) / patch_size, patch_size); // [N, C, H + pad_h, W + pad_w] + + return out; + } + }; + + struct FluxRunner : public GGMLRunner { + static std::map empty_tensor_types; + + public: + FluxParams flux_params; + Flux flux; + std::vector pe_vec; + std::vector mod_index_arange_vec; // for cache + SDVersion version; + bool use_mask = false; + + FluxRunner(ggml_backend_t backend, + std::map& tensor_types = empty_tensor_types, + const std::string prefix = "", + SDVersion version = VERSION_FLUX, + bool flash_attn = false, + bool use_mask = false) + : GGMLRunner(backend), use_mask(use_mask) { + flux_params.flash_attn = flash_attn; + flux_params.guidance_embed = false; + flux_params.depth = 0; + flux_params.depth_single_blocks = 0; + if (version == VERSION_FLUX_FILL) { + flux_params.in_channels = 384; + } + for (auto pair : tensor_types) { + std::string tensor_name = pair.first; + if (tensor_name.find("model.diffusion_model.") == std::string::npos) + continue; + if (tensor_name.find("guidance_in.in_layer.weight") != std::string::npos) { + // not schnell + flux_params.guidance_embed = true; + } + if (tensor_name.find("distilled_guidance_layer.in_proj.weight") != std::string::npos) { + // Chroma + flux_params.is_chroma = true; + } + size_t db = tensor_name.find("double_blocks."); + if (db != std::string::npos) { + tensor_name = tensor_name.substr(db); // remove prefix + int block_depth = atoi(tensor_name.substr(14, tensor_name.find(".", 14)).c_str()); + if (block_depth + 1 > flux_params.depth) { + flux_params.depth = block_depth + 1; + } + } + size_t sb = tensor_name.find("single_blocks."); + if (sb != std::string::npos) { + tensor_name = tensor_name.substr(sb); // remove prefix + int block_depth = atoi(tensor_name.substr(14, tensor_name.find(".", 14)).c_str()); + if (block_depth + 1 > flux_params.depth_single_blocks) { + flux_params.depth_single_blocks = block_depth + 1; + } + } + } + 
+ LOG_INFO("Flux blocks: %d double, %d single", flux_params.depth, flux_params.depth_single_blocks); + if (flux_params.is_chroma) { + LOG_INFO("Using pruned modulation (Chroma)"); + } else if (!flux_params.guidance_embed) { + LOG_INFO("Flux guidance is disabled (Schnell mode)"); + } + + flux = Flux(flux_params); + flux.init(params_ctx, tensor_types, prefix); + } + + std::string get_desc() { + return "flux"; + } + + void get_param_tensors(std::map& tensors, const std::string prefix) { + flux.get_param_tensors(tensors, prefix); + } + + struct ggml_cgraph* build_graph(struct ggml_tensor* x, + struct ggml_tensor* timesteps, + struct ggml_tensor* context, + struct ggml_tensor* c_concat, + struct ggml_tensor* y, + struct ggml_tensor* guidance, + std::vector ref_latents = {}, + std::vector skip_layers = {}) { + GGML_ASSERT(x->ne[3] == 1); + struct ggml_cgraph* gf = ggml_new_graph_custom(compute_ctx, FLUX_GRAPH_SIZE, false); + + struct ggml_tensor* mod_index_arange = NULL; + + x = to_backend(x); + context = to_backend(context); + if (c_concat != NULL) { + c_concat = to_backend(c_concat); + } + if (flux_params.is_chroma) { + guidance = ggml_set_f32(guidance, 0); + + if (!use_mask) { + y = NULL; + } + + // ggml_arange is not working on some backends, precompute it + mod_index_arange_vec = arange(0, 344); + mod_index_arange = ggml_new_tensor_1d(compute_ctx, GGML_TYPE_F32, mod_index_arange_vec.size()); + set_backend_tensor_data(mod_index_arange, mod_index_arange_vec.data()); + } + y = to_backend(y); + + timesteps = to_backend(timesteps); + if (flux_params.guidance_embed || flux_params.is_chroma) { + guidance = to_backend(guidance); + } + for (int i = 0; i < ref_latents.size(); i++) { + ref_latents[i] = to_backend(ref_latents[i]); + } + + pe_vec = flux.gen_pe(x->ne[1], x->ne[0], 2, x->ne[3], context->ne[1], ref_latents, flux_params.theta, flux_params.axes_dim); + int pos_len = pe_vec.size() / flux_params.axes_dim_sum / 2; + // LOG_DEBUG("pos_len %d", pos_len); + auto pe = 
ggml_new_tensor_4d(compute_ctx, GGML_TYPE_F32, 2, 2, flux_params.axes_dim_sum / 2, pos_len); + // pe->data = pe_vec.data(); + // print_ggml_tensor(pe); + // pe->data = NULL; + set_backend_tensor_data(pe, pe_vec.data()); + + struct ggml_tensor* out = flux.forward(compute_ctx, + x, + timesteps, + context, + c_concat, + y, + guidance, + pe, + mod_index_arange, + ref_latents, + skip_layers); + + ggml_build_forward_expand(gf, out); + + return gf; + } + + void compute(int n_threads, + struct ggml_tensor* x, + struct ggml_tensor* timesteps, + struct ggml_tensor* context, + struct ggml_tensor* c_concat, + struct ggml_tensor* y, + struct ggml_tensor* guidance, + std::vector ref_latents = {}, + struct ggml_tensor** output = NULL, + struct ggml_context* output_ctx = NULL, + std::vector skip_layers = std::vector()) { + // x: [N, in_channels, h, w] + // timesteps: [N, ] + // context: [N, max_position, hidden_size] + // y: [N, adm_in_channels] or [1, adm_in_channels] + // guidance: [N, ] + auto get_graph = [&]() -> struct ggml_cgraph* { + return build_graph(x, timesteps, context, c_concat, y, guidance, ref_latents, skip_layers); + }; + + GGMLRunner::compute(get_graph, n_threads, false, output, output_ctx); + } + + void test() { + struct ggml_init_params params; + params.mem_size = static_cast(20 * 1024 * 1024); // 20 MB + params.mem_buffer = NULL; + params.no_alloc = false; + + struct ggml_context* work_ctx = ggml_init(params); + GGML_ASSERT(work_ctx != NULL); + + { + // cpu f16: + // cuda f16: nan + // cuda q8_0: pass + auto x = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, 16, 16, 16, 1); + ggml_set_f32(x, 0.01f); + // print_ggml_tensor(x); + + std::vector timesteps_vec(1, 999.f); + auto timesteps = vector_to_ggml_tensor(work_ctx, timesteps_vec); + + std::vector guidance_vec(1, 3.5f); + auto guidance = vector_to_ggml_tensor(work_ctx, guidance_vec); + + auto context = ggml_new_tensor_3d(work_ctx, GGML_TYPE_F32, 4096, 256, 1); + ggml_set_f32(context, 0.01f); + // 
print_ggml_tensor(context); + + auto y = ggml_new_tensor_2d(work_ctx, GGML_TYPE_F32, 768, 1); + ggml_set_f32(y, 0.01f); + // print_ggml_tensor(y); + + struct ggml_tensor* out = NULL; + + int t0 = ggml_time_ms(); + compute(8, x, timesteps, context, NULL, y, guidance, {}, &out, work_ctx); + int t1 = ggml_time_ms(); + + print_ggml_tensor(out); + LOG_DEBUG("flux test done in %dms", t1 - t0); + } + } + + static void load_from_file_and_test(const std::string& file_path) { + // ggml_backend_t backend = ggml_backend_cuda_init(0); + ggml_backend_t backend = ggml_backend_cpu_init(); + ggml_type model_data_type = GGML_TYPE_Q8_0; + std::shared_ptr flux = std::shared_ptr(new FluxRunner(backend)); + { + LOG_INFO("loading from '%s'", file_path.c_str()); + + flux->alloc_params_buffer(); + std::map tensors; + flux->get_param_tensors(tensors, "model.diffusion_model"); + + ModelLoader model_loader; + if (!model_loader.init_from_file(file_path, "model.diffusion_model.")) { + LOG_ERROR("init model loader from file failed: '%s'", file_path.c_str()); + return; + } + + bool success = model_loader.load_tensors(tensors, backend); + + if (!success) { + LOG_ERROR("load tensors from model loader failed"); + return; + } + + LOG_INFO("flux model loaded"); + } + flux->test(); + } + }; + +} // namespace Flux + +#endif // __FLUX_HPP__ diff --git a/format-code.sh b/format-code.sh new file mode 100644 index 000000000..e0c0f3a1a --- /dev/null +++ b/format-code.sh @@ -0,0 +1,2 @@ +clang-format -style=file -i *.cpp *.h *.hpp +clang-format -style=file -i examples/cli/*.cpp \ No newline at end of file diff --git a/ggml b/ggml index ed522bb80..9e4bee1c5 160000 --- a/ggml +++ b/ggml @@ -1 +1 @@ -Subproject commit ed522bb8051658899b2f4a5bbb5483a5d21fcfb2 +Subproject commit 9e4bee1c5afc2d677a5b32ecb90cbdb483e81fff diff --git a/ggml_extend.hpp b/ggml_extend.hpp new file mode 100644 index 000000000..9f6a4fef6 --- /dev/null +++ b/ggml_extend.hpp @@ -0,0 +1,1652 @@ +#ifndef __GGML_EXTEND_HPP__ +#define 
__GGML_EXTEND_HPP__ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "ggml-alloc.h" +#include "ggml-backend.h" +#include "ggml-cpu.h" +#include "ggml.h" + +#include "model.h" + +#ifdef SD_USE_CUDA +#include "ggml-cuda.h" +#endif + +#ifdef SD_USE_METAL +#include "ggml-metal.h" +#endif + +#ifdef SD_USE_VULKAN +#include "ggml-vulkan.h" +#endif + +#ifdef SD_USE_OPENCL +#include "ggml-opencl.h" +#endif + +#ifdef SD_USE_SYCL +#include "ggml-sycl.h" +#endif + +#include "rng.hpp" +#include "util.h" + +#define EPS 1e-05f + +#ifndef __STATIC_INLINE__ +#define __STATIC_INLINE__ static inline +#endif + +// n-mode trensor-matrix product +// example: 2-mode product +// A: [ne03, k, ne01, ne00] +// B: k rows, m columns => [k, m] +// result is [ne03, m, ne01, ne00] +__STATIC_INLINE__ struct ggml_tensor* ggml_mul_n_mode(struct ggml_context* ctx, struct ggml_tensor* a, struct ggml_tensor* b, int mode = 0) { + // reshape A + // swap 0th and nth axis + a = ggml_cont(ctx, ggml_permute(ctx, a, mode, mode != 1 ? 1 : 0, mode != 2 ? 2 : 0, mode != 3 ? 3 : 0)); + int ne1 = a->ne[1]; + int ne2 = a->ne[2]; + int ne3 = a->ne[3]; + // make 2D + a = ggml_cont(ctx, ggml_reshape_2d(ctx, a, a->ne[0], (ne3 * ne2 * ne1))); + + struct ggml_tensor* result = ggml_cont(ctx, ggml_transpose(ctx, ggml_mul_mat(ctx, a, b))); + + // reshape output (same shape as a after permutation except first dim) + result = ggml_reshape_4d(ctx, result, result->ne[0], ne1, ne2, ne3); + // swap back 0th and nth axis + result = ggml_permute(ctx, result, mode, mode != 1 ? 1 : 0, mode != 2 ? 2 : 0, mode != 3 ? 
3 : 0); + return result; +} + +__STATIC_INLINE__ struct ggml_tensor* ggml_merge_lora(ggml_context* ctx, struct ggml_tensor* lora_down, struct ggml_tensor* lora_up, struct ggml_tensor* lora_mid = NULL) { + struct ggml_tensor* updown; + // flat lora tensors to multiply it + int64_t lora_up_rows = lora_up->ne[ggml_n_dims(lora_up) - 1]; + lora_up = ggml_reshape_2d(ctx, lora_up, ggml_nelements(lora_up) / lora_up_rows, lora_up_rows); + auto lora_down_n_dims = ggml_n_dims(lora_down); + // assume n_dims should always be a multiple of 2 (otherwise rank 1 doesn't work) + lora_down_n_dims = (lora_down_n_dims + lora_down_n_dims % 2); + int64_t lora_down_rows = lora_down->ne[lora_down_n_dims - 1]; + lora_down = ggml_reshape_2d(ctx, lora_down, ggml_nelements(lora_down) / lora_down_rows, lora_down_rows); + + // ggml_mul_mat requires tensor b transposed + lora_down = ggml_cont(ctx, ggml_transpose(ctx, lora_down)); + if (lora_mid == NULL) { + updown = ggml_mul_mat(ctx, lora_up, lora_down); + updown = ggml_cont(ctx, ggml_transpose(ctx, updown)); + } else { + // undoing tucker decomposition for conv layers. 
+ // lora_mid has shape (3, 3, Rank, Rank) + // lora_down has shape (Rank, In, 1, 1) + // lora_up has shape (Rank, Out, 1, 1) + // conv layer shape is (3, 3, Out, In) + updown = ggml_mul_n_mode(ctx, ggml_mul_n_mode(ctx, lora_mid, lora_down, 3), lora_up, 2); + updown = ggml_cont(ctx, updown); + } + return updown; +} + +// Kronecker product +// [ne03,ne02,ne01,ne00] x [ne13,ne12,ne11,ne10] => [ne03*ne13,ne02*ne12,ne01*ne11,ne00*ne10] +__STATIC_INLINE__ struct ggml_tensor* ggml_kronecker(ggml_context* ctx, struct ggml_tensor* a, struct ggml_tensor* b) { + return ggml_mul(ctx, + ggml_upscale_ext(ctx, + a, + a->ne[0] * b->ne[0], + a->ne[1] * b->ne[1], + a->ne[2] * b->ne[2], + a->ne[3] * b->ne[3], + GGML_SCALE_MODE_NEAREST), + b); +} + +__STATIC_INLINE__ void ggml_log_callback_default(ggml_log_level level, const char* text, void* user_data) { + (void)level; + (void)user_data; + fputs(text, stderr); + fflush(stderr); +} + +__STATIC_INLINE__ void ggml_tensor_set_f32_randn(struct ggml_tensor* tensor, std::shared_ptr rng) { + uint32_t n = (uint32_t)ggml_nelements(tensor); + std::vector random_numbers = rng->randn(n); + for (uint32_t i = 0; i < n; i++) { + ggml_set_f32_1d(tensor, i, random_numbers[i]); + } +} + +// set tensor[i, j, k, l] +// set tensor[l] +// set tensor[k, l] +// set tensor[j, k, l] +__STATIC_INLINE__ void ggml_tensor_set_f32(struct ggml_tensor* tensor, float value, int l, int k = 0, int j = 0, int i = 0) { + GGML_ASSERT(tensor->nb[0] == sizeof(float)); + *(float*)((char*)(tensor->data) + i * tensor->nb[3] + j * tensor->nb[2] + k * tensor->nb[1] + l * tensor->nb[0]) = value; +} + +__STATIC_INLINE__ float ggml_tensor_get_f32(const ggml_tensor* tensor, int l, int k = 0, int j = 0, int i = 0) { + if (tensor->buffer != NULL) { + float value; + ggml_backend_tensor_get(tensor, &value, i * tensor->nb[3] + j * tensor->nb[2] + k * tensor->nb[1] + l * tensor->nb[0], sizeof(float)); + return value; + } + GGML_ASSERT(tensor->nb[0] == sizeof(float)); + return 
*(float*)((char*)(tensor->data) + i * tensor->nb[3] + j * tensor->nb[2] + k * tensor->nb[1] + l * tensor->nb[0]); +} + +__STATIC_INLINE__ int ggml_tensor_get_i32(const ggml_tensor* tensor, int l, int k = 0, int j = 0, int i = 0) { + if (tensor->buffer != NULL) { + float value; + ggml_backend_tensor_get(tensor, &value, i * tensor->nb[3] + j * tensor->nb[2] + k * tensor->nb[1] + l * tensor->nb[0], sizeof(int)); + return value; + } + GGML_ASSERT(tensor->nb[0] == sizeof(int)); + return *(int*)((char*)(tensor->data) + i * tensor->nb[3] + j * tensor->nb[2] + k * tensor->nb[1] + l * tensor->nb[0]); +} + +__STATIC_INLINE__ ggml_fp16_t ggml_tensor_get_f16(const ggml_tensor* tensor, int l, int k = 0, int j = 0, int i = 0) { + GGML_ASSERT(tensor->nb[0] == sizeof(ggml_fp16_t)); + return *(ggml_fp16_t*)((char*)(tensor->data) + i * tensor->nb[3] + j * tensor->nb[2] + k * tensor->nb[1] + l * tensor->nb[0]); +} + +static struct ggml_tensor* get_tensor_from_graph(struct ggml_cgraph* gf, const char* name) { + struct ggml_tensor* res = NULL; + for (int i = 0; i < ggml_graph_n_nodes(gf); i++) { + struct ggml_tensor* node = ggml_graph_node(gf, i); + // printf("%d, %s \n", i, ggml_get_name(node)); + if (strcmp(ggml_get_name(node), name) == 0) { + res = node; + break; + } + } + return res; +} + +__STATIC_INLINE__ void print_ggml_tensor(struct ggml_tensor* tensor, bool shape_only = false, const char* mark = "") { + printf("%s (%s): shape(%zu, %zu, %zu, %zu)\n", mark, ggml_type_name(tensor->type), tensor->ne[0], tensor->ne[1], tensor->ne[2], tensor->ne[3]); + fflush(stdout); + if (shape_only) { + return; + } + int range = 3; + for (int i = 0; i < tensor->ne[3]; i++) { + if (i >= range && i + range < tensor->ne[3]) { + continue; + } + for (int j = 0; j < tensor->ne[2]; j++) { + if (j >= range && j + range < tensor->ne[2]) { + continue; + } + for (int k = 0; k < tensor->ne[1]; k++) { + if (k >= range && k + range < tensor->ne[1]) { + continue; + } + for (int l = 0; l < tensor->ne[0]; l++) { 
+ if (l >= range && l + range < tensor->ne[0]) { + continue; + } + if (tensor->type == GGML_TYPE_F32) { + printf(" [%d, %d, %d, %d] = %f\n", i, j, k, l, ggml_tensor_get_f32(tensor, l, k, j, i)); + } else if (tensor->type == GGML_TYPE_F16) { + printf(" [%d, %d, %d, %d] = %i\n", i, j, k, l, ggml_tensor_get_f16(tensor, l, k, j, i)); + } else if (tensor->type == GGML_TYPE_I32) { + printf(" [%d, %d, %d, %d] = %i\n", i, j, k, l, ggml_tensor_get_i32(tensor, l, k, j, i)); + } + fflush(stdout); + } + } + } + } +} + +__STATIC_INLINE__ ggml_tensor* load_tensor_from_file(ggml_context* ctx, const std::string& file_path) { + std::ifstream file(file_path, std::ios::binary); + if (!file.is_open()) { + LOG_ERROR("failed to open '%s'", file_path.c_str()); + return NULL; + } + int32_t n_dims; + int32_t length; + int32_t ttype; + + file.read(reinterpret_cast(&n_dims), sizeof(n_dims)); + file.read(reinterpret_cast(&length), sizeof(length)); + file.read(reinterpret_cast(&ttype), sizeof(ttype)); + + if (file.eof()) { + LOG_ERROR("incomplete file '%s'", file_path.c_str()); + return NULL; + } + + int32_t nelements = 1; + int32_t ne[4] = {1, 1, 1, 1}; + for (int i = 0; i < n_dims; ++i) { + file.read(reinterpret_cast(&ne[i]), sizeof(ne[i])); + nelements *= ne[i]; + } + std::string name(length, 0); + file.read(&name[0], length); + ggml_tensor* tensor = ggml_new_tensor_4d(ctx, (ggml_type)ttype, ne[0], ne[1], ne[2], ne[3]); + const size_t bpe = ggml_type_size(ggml_type(ttype)); + file.read(reinterpret_cast(tensor->data), ggml_nbytes(tensor)); + return tensor; +} + +// __STATIC_INLINE__ void save_tensor_to_file(const std::string& file_name, ggml_tensor* tensor, const std::string & name) { +// std::string file_name_ = file_name + ".tensor"; +// std::string name_ = name; +// std::ofstream file("./" + file_name_, std::ios::binary); +// file.write(reinterpret_cast(&tensor->n_dims), sizeof(tensor->n_dims)); +// int len = (int)name_.size(); +// file.write(reinterpret_cast(&len), sizeof(len)); +// int 
ttype = (int)tensor->type; +// file.write(reinterpret_cast(&ttype), sizeof(ttype)); +// for (int i = 0; i < tensor->n_dims; ++i) { +// int ne_ = (int) tensor->ne[i]; +// file.write(reinterpret_cast(&ne_), sizeof(ne_)); +// } +// file.write(&name_[0], len); +// char* data = nullptr; +// file.write((char*)tensor->data, ggml_nbytes(tensor)); +// file.close(); +// } + +__STATIC_INLINE__ void copy_ggml_tensor(struct ggml_tensor* dst, struct ggml_tensor* src) { + if (dst->type == src->type) { + dst->nb[0] = src->nb[0]; + dst->nb[1] = src->nb[1]; + dst->nb[2] = src->nb[2]; + dst->nb[3] = src->nb[3]; + + memcpy(((char*)dst->data), ((char*)src->data), ggml_nbytes(dst)); + return; + } + struct ggml_init_params params; + params.mem_size = 10 * 1024 * 1024; // for padding + params.mem_buffer = NULL; + params.no_alloc = false; + struct ggml_context* ctx = ggml_init(params); + if (!ctx) { + LOG_ERROR("ggml_init() failed"); + return; + } + ggml_tensor* final = ggml_cpy(ctx, src, dst); + + struct ggml_cgraph* graph = ggml_new_graph(ctx); + ggml_build_forward_expand(graph, final); + ggml_graph_compute_with_ctx(ctx, graph, 1); + ggml_free(ctx); +} + +__STATIC_INLINE__ float sigmoid(float x) { + return 1 / (1.0f + expf(-x)); +} + +// SPECIAL OPERATIONS WITH TENSORS + +__STATIC_INLINE__ uint8_t* sd_tensor_to_image(struct ggml_tensor* input) { + int64_t width = input->ne[0]; + int64_t height = input->ne[1]; + int64_t channels = input->ne[2]; + GGML_ASSERT(channels == 3 && input->type == GGML_TYPE_F32); + uint8_t* image_data = (uint8_t*)malloc(width * height * channels); + for (int iy = 0; iy < height; iy++) { + for (int ix = 0; ix < width; ix++) { + for (int k = 0; k < channels; k++) { + float value = ggml_tensor_get_f32(input, ix, iy, k); + *(image_data + iy * width * channels + ix * channels + k) = (uint8_t)(value * 255.0f); + } + } + } + return image_data; +} + +__STATIC_INLINE__ uint8_t* sd_tensor_to_mul_image(struct ggml_tensor* input, int idx) { + int64_t width = input->ne[0]; + 
int64_t height = input->ne[1]; + int64_t channels = input->ne[2]; + GGML_ASSERT(channels == 3 && input->type == GGML_TYPE_F32); + uint8_t* image_data = (uint8_t*)malloc(width * height * channels); + for (int iy = 0; iy < height; iy++) { + for (int ix = 0; ix < width; ix++) { + for (int k = 0; k < channels; k++) { + float value = ggml_tensor_get_f32(input, ix, iy, k, idx); + *(image_data + iy * width * channels + ix * channels + k) = (uint8_t)(value * 255.0f); + } + } + } + return image_data; +} + +__STATIC_INLINE__ void sd_image_to_tensor(const uint8_t* image_data, + struct ggml_tensor* output, + bool scale = true) { + int64_t width = output->ne[0]; + int64_t height = output->ne[1]; + int64_t channels = output->ne[2]; + GGML_ASSERT(channels == 3 && output->type == GGML_TYPE_F32); + for (int iy = 0; iy < height; iy++) { + for (int ix = 0; ix < width; ix++) { + for (int k = 0; k < channels; k++) { + float value = *(image_data + iy * width * channels + ix * channels + k); + if (scale) { + value /= 255.f; + } + ggml_tensor_set_f32(output, value, ix, iy, k); + } + } + } +} + +__STATIC_INLINE__ void sd_mask_to_tensor(const uint8_t* image_data, + struct ggml_tensor* output, + bool scale = true) { + int64_t width = output->ne[0]; + int64_t height = output->ne[1]; + int64_t channels = output->ne[2]; + GGML_ASSERT(channels == 1 && output->type == GGML_TYPE_F32); + for (int iy = 0; iy < height; iy++) { + for (int ix = 0; ix < width; ix++) { + float value = *(image_data + iy * width * channels + ix); + if (scale) { + value /= 255.f; + } + ggml_tensor_set_f32(output, value, ix, iy); + } + } +} + +__STATIC_INLINE__ void sd_apply_mask(struct ggml_tensor* image_data, + struct ggml_tensor* mask, + struct ggml_tensor* output) { + int64_t width = output->ne[0]; + int64_t height = output->ne[1]; + int64_t channels = output->ne[2]; + GGML_ASSERT(output->type == GGML_TYPE_F32); + for (int ix = 0; ix < width; ix++) { + for (int iy = 0; iy < height; iy++) { + float m = 
ggml_tensor_get_f32(mask, ix, iy); + m = round(m); // inpaint models need binary masks + ggml_tensor_set_f32(mask, m, ix, iy); + for (int k = 0; k < channels; k++) { + float value = (1 - m) * (ggml_tensor_get_f32(image_data, ix, iy, k) - .5) + .5; + ggml_tensor_set_f32(output, value, ix, iy, k); + } + } + } +} + +__STATIC_INLINE__ void sd_mul_images_to_tensor(const uint8_t* image_data, + struct ggml_tensor* output, + int idx, + float* mean = NULL, + float* std = NULL) { + int64_t width = output->ne[0]; + int64_t height = output->ne[1]; + int64_t channels = output->ne[2]; + GGML_ASSERT(channels == 3 && output->type == GGML_TYPE_F32); + for (int iy = 0; iy < height; iy++) { + for (int ix = 0; ix < width; ix++) { + for (int k = 0; k < channels; k++) { + int value = *(image_data + iy * width * channels + ix * channels + k); + float pixel_val = value / 255.0f; + if (mean != NULL && std != NULL) + pixel_val = (pixel_val - mean[k]) / std[k]; + ggml_tensor_set_f32(output, pixel_val, ix, iy, k, idx); + } + } + } +} + +__STATIC_INLINE__ void sd_image_f32_to_tensor(const float* image_data, + struct ggml_tensor* output, + bool scale = true) { + int64_t width = output->ne[0]; + int64_t height = output->ne[1]; + int64_t channels = output->ne[2]; + GGML_ASSERT(channels == 3 && output->type == GGML_TYPE_F32); + for (int iy = 0; iy < height; iy++) { + for (int ix = 0; ix < width; ix++) { + for (int k = 0; k < channels; k++) { + int value = *(image_data + iy * width * channels + ix * channels + k); + if (scale) { + value /= 255.f; + } + ggml_tensor_set_f32(output, value, ix, iy, k); + } + } + } +} + +__STATIC_INLINE__ void ggml_split_tensor_2d(struct ggml_tensor* input, + struct ggml_tensor* output, + int x, + int y) { + int64_t width = output->ne[0]; + int64_t height = output->ne[1]; + int64_t channels = output->ne[2]; + GGML_ASSERT(input->type == GGML_TYPE_F32 && output->type == GGML_TYPE_F32); + for (int iy = 0; iy < height; iy++) { + for (int ix = 0; ix < width; ix++) { + for 
(int k = 0; k < channels; k++) { + float value = ggml_tensor_get_f32(input, ix + x, iy + y, k); + ggml_tensor_set_f32(output, value, ix, iy, k); + } + } + } +} + +// unclamped -> expects x in the range [0-1] +__STATIC_INLINE__ float ggml_smootherstep_f32(const float x) { + GGML_ASSERT(x >= 0.f && x <= 1.f); + return x * x * x * (x * (6.0f * x - 15.0f) + 10.0f); +} + +__STATIC_INLINE__ void ggml_merge_tensor_2d(struct ggml_tensor* input, + struct ggml_tensor* output, + int x, + int y, + int overlap) { + int64_t width = input->ne[0]; + int64_t height = input->ne[1]; + int64_t channels = input->ne[2]; + + int64_t img_width = output->ne[0]; + int64_t img_height = output->ne[1]; + + GGML_ASSERT(input->type == GGML_TYPE_F32 && output->type == GGML_TYPE_F32); + for (int iy = 0; iy < height; iy++) { + for (int ix = 0; ix < width; ix++) { + for (int k = 0; k < channels; k++) { + float new_value = ggml_tensor_get_f32(input, ix, iy, k); + if (overlap > 0) { // blend colors in overlapped area + float old_value = ggml_tensor_get_f32(output, x + ix, y + iy, k); + + const float x_f_0 = (x > 0) ? ix / float(overlap) : 1; + const float x_f_1 = (x < (img_width - width)) ? (width - ix) / float(overlap) : 1; + const float y_f_0 = (y > 0) ? iy / float(overlap) : 1; + const float y_f_1 = (y < (img_height - height)) ? 
(height - iy) / float(overlap) : 1; + + const float x_f = std::min(std::min(x_f_0, x_f_1), 1.f); + const float y_f = std::min(std::min(y_f_0, y_f_1), 1.f); + + ggml_tensor_set_f32( + output, + old_value + new_value * ggml_smootherstep_f32(y_f) * ggml_smootherstep_f32(x_f), + x + ix, y + iy, k); + } else { + ggml_tensor_set_f32(output, new_value, x + ix, y + iy, k); + } + } + } + } +} + +__STATIC_INLINE__ float ggml_tensor_mean(struct ggml_tensor* src) { + float mean = 0.0f; + int64_t nelements = ggml_nelements(src); + float* data = (float*)src->data; + for (int i = 0; i < nelements; i++) { + mean += data[i] / nelements * 1.0f; + } + return mean; +} + +// a = a+b +__STATIC_INLINE__ void ggml_tensor_add(struct ggml_tensor* a, struct ggml_tensor* b) { + GGML_ASSERT(ggml_nelements(a) == ggml_nelements(b)); + int64_t nelements = ggml_nelements(a); + float* vec_a = (float*)a->data; + float* vec_b = (float*)b->data; + for (int i = 0; i < nelements; i++) { + vec_a[i] = vec_a[i] + vec_b[i]; + } +} + +__STATIC_INLINE__ void ggml_tensor_scale(struct ggml_tensor* src, float scale) { + int64_t nelements = ggml_nelements(src); + float* data = (float*)src->data; + for (int i = 0; i < nelements; i++) { + data[i] = data[i] * scale; + } +} + +__STATIC_INLINE__ void ggml_tensor_clamp(struct ggml_tensor* src, float min, float max) { + int64_t nelements = ggml_nelements(src); + float* data = (float*)src->data; + for (int i = 0; i < nelements; i++) { + float val = data[i]; + data[i] = val < min ? min : (val > max ? 
max : val); + } +} + +__STATIC_INLINE__ struct ggml_tensor* ggml_tensor_concat(struct ggml_context* ctx, + struct ggml_tensor* a, + struct ggml_tensor* b, + int dim) { + int64_t ne[GGML_MAX_DIMS]; + for (int d = 0; d < GGML_MAX_DIMS; ++d) { + if (d == dim) { + ne[d] = a->ne[d] + b->ne[d]; + continue; + } + GGML_ASSERT(a->ne[d] == b->ne[d]); + ne[d] = a->ne[d]; + } + struct ggml_tensor* result = ggml_new_tensor(ctx, a->type, GGML_MAX_DIMS, ne); + int64_t o[4] = {0, 0, 0, 0}; + o[dim] = a->ne[dim]; + + float v; + for (int i3 = 0; i3 < result->ne[3]; i3++) { + for (int i2 = 0; i2 < result->ne[2]; i2++) { + for (int i1 = 0; i1 < result->ne[1]; i1++) { + for (int i0 = 0; i0 < result->ne[0]; i0++) { + if (i0 < a->ne[0] && i1 < a->ne[1] && i2 < a->ne[2] && i3 < a->ne[3]) { + v = ggml_tensor_get_f32(a, i0, i1, i2, i3); + } else { + v = ggml_tensor_get_f32(b, i0 - o[0], i1 - o[1], i2 - o[2], i3 - o[3]); + } + + ggml_tensor_set_f32(result, v, i0, i1, i2, i3); + } + } + } + } + return result; +} + +// convert values from [0, 1] to [-1, 1] +__STATIC_INLINE__ void ggml_tensor_scale_input(struct ggml_tensor* src) { + int64_t nelements = ggml_nelements(src); + float* data = (float*)src->data; + for (int i = 0; i < nelements; i++) { + float val = data[i]; + data[i] = val * 2.0f - 1.0f; + } +} + +// convert values from [-1, 1] to [0, 1] +__STATIC_INLINE__ void ggml_tensor_scale_output(struct ggml_tensor* src) { + int64_t nelements = ggml_nelements(src); + float* data = (float*)src->data; + for (int i = 0; i < nelements; i++) { + float val = data[i]; + data[i] = (val + 1.0f) * 0.5f; + } +} + +typedef std::function on_tile_process; + +// Tiling +__STATIC_INLINE__ void sd_tiling(ggml_tensor* input, ggml_tensor* output, const int scale, const int tile_size, const float tile_overlap_factor, on_tile_process on_processing) { + output = ggml_set_f32(output, 0); + + int input_width = (int)input->ne[0]; + int input_height = (int)input->ne[1]; + int output_width = (int)output->ne[0]; + int 
output_height = (int)output->ne[1]; + GGML_ASSERT(input_width % 2 == 0 && input_height % 2 == 0 && output_width % 2 == 0 && output_height % 2 == 0); // should be multiple of 2 + + int tile_overlap = (int32_t)(tile_size * tile_overlap_factor); + int non_tile_overlap = tile_size - tile_overlap; + + struct ggml_init_params params = {}; + params.mem_size += tile_size * tile_size * input->ne[2] * sizeof(float); // input chunk + params.mem_size += (tile_size * scale) * (tile_size * scale) * output->ne[2] * sizeof(float); // output chunk + params.mem_size += 3 * ggml_tensor_overhead(); + params.mem_buffer = NULL; + params.no_alloc = false; + + LOG_DEBUG("tile work buffer size: %.2f MB", params.mem_size / 1024.f / 1024.f); + + // draft context + struct ggml_context* tiles_ctx = ggml_init(params); + if (!tiles_ctx) { + LOG_ERROR("ggml_init() failed"); + return; + } + + // tiling + ggml_tensor* input_tile = ggml_new_tensor_4d(tiles_ctx, GGML_TYPE_F32, tile_size, tile_size, input->ne[2], 1); + ggml_tensor* output_tile = ggml_new_tensor_4d(tiles_ctx, GGML_TYPE_F32, tile_size * scale, tile_size * scale, output->ne[2], 1); + on_processing(input_tile, NULL, true); + int num_tiles = ceil((float)input_width / non_tile_overlap) * ceil((float)input_height / non_tile_overlap); + LOG_INFO("processing %i tiles", num_tiles); + pretty_progress(1, num_tiles, 0.0f); + int tile_count = 1; + bool last_y = false, last_x = false; + float last_time = 0.0f; + for (int y = 0; y < input_height && !last_y; y += non_tile_overlap) { + if (y + tile_size >= input_height) { + y = input_height - tile_size; + last_y = true; + } + for (int x = 0; x < input_width && !last_x; x += non_tile_overlap) { + if (x + tile_size >= input_width) { + x = input_width - tile_size; + last_x = true; + } + int64_t t1 = ggml_time_ms(); + ggml_split_tensor_2d(input, input_tile, x, y); + on_processing(input_tile, output_tile, false); + ggml_merge_tensor_2d(output_tile, output, x * scale, y * scale, tile_overlap * scale); + 
int64_t t2 = ggml_time_ms(); + last_time = (t2 - t1) / 1000.0f; + pretty_progress(tile_count, num_tiles, last_time); + tile_count++; + } + last_x = false; + } + if (tile_count < num_tiles) { + pretty_progress(num_tiles, num_tiles, last_time); + } + ggml_free(tiles_ctx); +} + +__STATIC_INLINE__ struct ggml_tensor* ggml_group_norm_32(struct ggml_context* ctx, + struct ggml_tensor* a) { + const float eps = 1e-6f; // default eps parameter + return ggml_group_norm(ctx, a, 32, eps); +} + +__STATIC_INLINE__ struct ggml_tensor* ggml_nn_linear(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* w, + struct ggml_tensor* b) { + x = ggml_mul_mat(ctx, w, x); + if (b != NULL) { + x = ggml_add(ctx, x, b); + } + return x; +} + +// w: [OC,IC, KH, KW] +// x: [N, IC, IH, IW] +// b: [OC,] +// result: [N, OC, OH, OW] +__STATIC_INLINE__ struct ggml_tensor* ggml_nn_conv_2d(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* w, + struct ggml_tensor* b, + int s0 = 1, + int s1 = 1, + int p0 = 0, + int p1 = 0, + int d0 = 1, + int d1 = 1) { + x = ggml_conv_2d(ctx, w, x, s0, s1, p0, p1, d0, d1); + if (b != NULL) { + b = ggml_reshape_4d(ctx, b, 1, 1, b->ne[0], 1); + // b = ggml_repeat(ctx, b, x); + x = ggml_add(ctx, x, b); + } + return x; +} + +// w: [OC,IC, KD, 1 * 1] +// x: [N, IC, IH, IW] +// b: [OC,] +// result: [N, OC, OH, OW] +__STATIC_INLINE__ struct ggml_tensor* ggml_nn_conv_3d_nx1x1_bak(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* w, + struct ggml_tensor* b, + int s2 = 1, + int p2 = 1, + int d2 = 1) { + GGML_ASSERT(w->ne[0] == 1); + // timesteps = x.shape[0] + // x = rearrange(x, "(b t) c h w -> b c t h w", t=timesteps) + // x = conv3d(x) + // return rearrange(x, "b c t h w -> (b t) c h w") + int64_t T = x->ne[3]; + int64_t B = x->ne[3] / T; + int64_t C = x->ne[2]; + int64_t H = x->ne[1]; + int64_t W = x->ne[0]; + + x = ggml_reshape_4d(ctx, x, W * H, C, T, B); // (b t) c h w -> b t c (h w) + x = ggml_cont(ctx, 
ggml_permute(ctx, x, 0, 2, 1, 3)); // b t c (h w) -> b c t (h w) + x = ggml_conv_2d(ctx, w, x, 1, s2, 0, p2, 1, d2); // [B, OC, T, OH * OW] + if (b != NULL) { + b = ggml_reshape_4d(ctx, b, 1, 1, b->ne[0], 1); + x = ggml_add(ctx, x, b); + } + x = ggml_cont(ctx, ggml_permute(ctx, x, 0, 2, 1, 3)); // b c t (h w) -> b t c (h w) + x = ggml_reshape_4d(ctx, x, W, H, C, T * B); // b t c (h w) -> (b t) c h w + return x; // [B*T, OC, OH, OW] +} + +// w: [OC,IC, KD, 1 * 1] +// x: [N, IC, ID, IH*IW] +// b: [OC,] +// result: [N, OC, OD, OH*OW] +__STATIC_INLINE__ struct ggml_tensor* ggml_nn_conv_3d_nx1x1(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* w, + struct ggml_tensor* b, + int s2 = 1, + int p2 = 1, + int d2 = 1) { + x = ggml_conv_2d(ctx, w, x, 1, s2, 0, p2, 1, d2); // [N, OC, T, OH * OW] + if (b != NULL) { + b = ggml_reshape_4d(ctx, b, 1, 1, b->ne[0], 1); + x = ggml_add(ctx, x, b); + } + return x; // [N, OC, T, OH * OW] +} + +// qkv: [N, L, 3*C] +// return: ([N, L, C], [N, L, C], [N, L, C]) +__STATIC_INLINE__ std::vector split_qkv(struct ggml_context* ctx, + struct ggml_tensor* qkv) { + qkv = ggml_reshape_4d(ctx, qkv, qkv->ne[0] / 3, 3, qkv->ne[1], qkv->ne[2]); // [N, L, 3, C] + qkv = ggml_cont(ctx, ggml_permute(ctx, qkv, 0, 3, 1, 2)); // [3, N, L, C] + + int64_t offset = qkv->nb[2] * qkv->ne[2]; + auto q = ggml_view_3d(ctx, qkv, qkv->ne[0], qkv->ne[1], qkv->ne[2], qkv->nb[1], qkv->nb[2], offset * 0); // [N, L, C] + auto k = ggml_view_3d(ctx, qkv, qkv->ne[0], qkv->ne[1], qkv->ne[2], qkv->nb[1], qkv->nb[2], offset * 1); // [N, L, C] + auto v = ggml_view_3d(ctx, qkv, qkv->ne[0], qkv->ne[1], qkv->ne[2], qkv->nb[1], qkv->nb[2], offset * 2); // [N, L, C] + return {q, k, v}; +} + +// q: [N * n_head, n_token, d_head] +// k: [N * n_head, n_k, d_head] +// v: [N * n_head, d_head, n_k] +// return: [N * n_head, n_token, d_head] +__STATIC_INLINE__ struct ggml_tensor* ggml_nn_attention(struct ggml_context* ctx, + struct ggml_tensor* q, + struct ggml_tensor* k, 
+ struct ggml_tensor* v, + bool mask = false) { +#if defined(SD_USE_FLASH_ATTENTION) && !defined(SD_USE_CUDA) && !defined(SD_USE_METAL) && !defined(SD_USE_VULKAN) && !defined(SD_USE_SYCL) + struct ggml_tensor* kqv = ggml_flash_attn(ctx, q, k, v, false); // [N * n_head, n_token, d_head] +#else + float d_head = (float)q->ne[0]; + struct ggml_tensor* kq = ggml_mul_mat(ctx, k, q); // [N * n_head, n_token, n_k] + kq = ggml_scale_inplace(ctx, kq, 1.0f / sqrt(d_head)); + if (mask) { + kq = ggml_diag_mask_inf_inplace(ctx, kq, 0); + } + kq = ggml_soft_max_inplace(ctx, kq); + struct ggml_tensor* kqv = ggml_mul_mat(ctx, v, kq); // [N * n_head, n_token, d_head] +#endif + return kqv; +} + +// q: [N, L_q, C] or [N*n_head, L_q, d_head] +// k: [N, L_k, C] or [N*n_head, L_k, d_head] +// v: [N, L_k, C] or [N, L_k, n_head, d_head] +// return: [N, L_q, C] +__STATIC_INLINE__ struct ggml_tensor* ggml_nn_attention_ext(struct ggml_context* ctx, + struct ggml_tensor* q, + struct ggml_tensor* k, + struct ggml_tensor* v, + int64_t n_head, + struct ggml_tensor* mask = NULL, + bool diag_mask_inf = false, + bool skip_reshape = false, + bool flash_attn = false) { + int64_t L_q; + int64_t L_k; + int64_t C; + int64_t N; + int64_t d_head; + if (!skip_reshape) { + L_q = q->ne[1]; + L_k = k->ne[1]; + C = q->ne[0]; + N = q->ne[2]; + d_head = C / n_head; + q = ggml_reshape_4d(ctx, q, d_head, n_head, L_q, N); // [N, L_q, n_head, d_head] + q = ggml_cont(ctx, ggml_permute(ctx, q, 0, 2, 1, 3)); // [N, n_head, L_q, d_head] + q = ggml_reshape_3d(ctx, q, d_head, L_q, n_head * N); // [N * n_head, L_q, d_head] + + k = ggml_reshape_4d(ctx, k, d_head, n_head, L_k, N); // [N, L_k, n_head, d_head] + k = ggml_cont(ctx, ggml_permute(ctx, k, 0, 2, 1, 3)); // [N, n_head, L_k, d_head] + k = ggml_reshape_3d(ctx, k, d_head, L_k, n_head * N); // [N * n_head, L_k, d_head] + + v = ggml_reshape_4d(ctx, v, d_head, n_head, L_k, N); // [N, L_k, n_head, d_head] + } else { + L_q = q->ne[1]; + L_k = k->ne[1]; + d_head = v->ne[0]; + 
N = v->ne[3]; + C = d_head * n_head; + } + + float scale = (1.0f / sqrt((float)d_head)); + + // if (flash_attn) { + // LOG_DEBUG("attention_ext L_q:%d L_k:%d n_head:%d C:%d d_head:%d N:%d", L_q, L_k, n_head, C, d_head, N); + // } + // is there anything oddly shaped?? ping Green-Sky if you can trip this assert + GGML_ASSERT(((L_k % 256 == 0) && L_q == L_k) || !(L_k % 256 == 0)); + + bool can_use_flash_attn = true; + can_use_flash_attn = can_use_flash_attn && L_k % 256 == 0; + can_use_flash_attn = can_use_flash_attn && d_head % 64 == 0; // double check + + // cuda max d_head seems to be 256, cpu does seem to work with 512 + can_use_flash_attn = can_use_flash_attn && d_head <= 256; // double check + + if (mask != nullptr) { + // TODO(Green-Sky): figure out if we can bend t5 to work too + can_use_flash_attn = can_use_flash_attn && mask->ne[2] == 1; + can_use_flash_attn = can_use_flash_attn && mask->ne[3] == 1; + } + + // TODO(Green-Sky): more pad or disable for funny tensor shapes + + ggml_tensor* kqv = nullptr; + // GGML_ASSERT((flash_attn && can_use_flash_attn) || !flash_attn); + if (can_use_flash_attn && flash_attn) { + // LOG_DEBUG("using flash attention"); + k = ggml_cast(ctx, k, GGML_TYPE_F16); + + v = ggml_cont(ctx, ggml_permute(ctx, v, 0, 2, 1, 3)); // [N, n_head, L_k, d_head] + v = ggml_reshape_3d(ctx, v, d_head, L_k, n_head * N); // [N * n_head, L_k, d_head] + v = ggml_cast(ctx, v, GGML_TYPE_F16); + + if (mask != nullptr) { + mask = ggml_transpose(ctx, mask); + + if (mask->ne[1] < GGML_PAD(q->ne[1], GGML_KQ_MASK_PAD)) { + LOG_DEBUG("mask dims %ld, %ld, %ld, %ld\n", mask->ne[0], mask->ne[1], mask->ne[2], mask->ne[3]); + LOG_DEBUG("needs padding, padding from %ld to %ld\n", mask->ne[1], GGML_PAD(q->ne[1], GGML_KQ_MASK_PAD)); + mask = ggml_pad(ctx, mask, 0, GGML_PAD(q->ne[1], GGML_KQ_MASK_PAD) - mask->ne[1], 0, 0); + } + + mask = ggml_cast(ctx, mask, GGML_TYPE_F16); + } + + kqv = ggml_flash_attn_ext(ctx, q, k, v, mask, scale, 0, 0); + 
ggml_flash_attn_ext_set_prec(kqv, GGML_PREC_F32); + + // kqv = ggml_view_3d(ctx, kqv, d_head, n_head, L_k, kqv->nb[1], kqv->nb[2], 0); + kqv = ggml_view_3d(ctx, kqv, d_head, n_head, L_q, kqv->nb[1], kqv->nb[2], 0); + } else { + v = ggml_cont(ctx, ggml_permute(ctx, v, 1, 2, 0, 3)); // [N, n_head, d_head, L_k] + v = ggml_reshape_3d(ctx, v, L_k, d_head, n_head * N); // [N * n_head, d_head, L_k] + + auto kq = ggml_mul_mat(ctx, k, q); // [N * n_head, L_q, L_k] + kq = ggml_scale_inplace(ctx, kq, scale); + if (mask) { + kq = ggml_add_inplace(ctx, kq, mask); + } + if (diag_mask_inf) { + kq = ggml_diag_mask_inf_inplace(ctx, kq, 0); + } + kq = ggml_soft_max_inplace(ctx, kq); + + kqv = ggml_mul_mat(ctx, v, kq); // [N * n_head, L_q, d_head] + + kqv = ggml_reshape_4d(ctx, kqv, d_head, L_q, n_head, N); // [N, n_head, L_q, d_head] + kqv = ggml_permute(ctx, kqv, 0, 2, 1, 3); // [N, L_q, n_head, d_head] + } + + kqv = ggml_cont(ctx, kqv); + kqv = ggml_reshape_3d(ctx, kqv, d_head * n_head, L_q, N); // [N, L_q, C] + + return kqv; +} + +__STATIC_INLINE__ struct ggml_tensor* ggml_nn_layer_norm(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* w, + struct ggml_tensor* b, + float eps = EPS) { + x = ggml_norm(ctx, x, eps); + if (w != NULL) { + x = ggml_mul(ctx, x, w); + if (b != NULL) { + x = ggml_add(ctx, x, b); + } + } + return x; +} + +__STATIC_INLINE__ struct ggml_tensor* ggml_nn_group_norm(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* w, + struct ggml_tensor* b, + int num_groups = 32) { + if (ggml_n_dims(x) >= 3 && w != NULL && b != NULL) { + w = ggml_reshape_4d(ctx, w, 1, 1, w->ne[0], 1); + b = ggml_reshape_4d(ctx, b, 1, 1, b->ne[0], 1); + } + + const float eps = 1e-6f; // default eps parameter + x = ggml_group_norm(ctx, x, num_groups, eps); + if (w != NULL && b != NULL) { + x = ggml_mul(ctx, x, w); + // b = ggml_repeat(ctx, b, x); + x = ggml_add(ctx, x, b); + } + return x; +} + +__STATIC_INLINE__ void 
ggml_backend_tensor_get_and_sync(ggml_backend_t backend, const struct ggml_tensor* tensor, void* data, size_t offset, size_t size) { +#if defined(SD_USE_CUDA) || defined(SD_USE_SYCL) + if (!ggml_backend_is_cpu(backend)) { + ggml_backend_tensor_get_async(backend, tensor, data, offset, size); + ggml_backend_synchronize(backend); + } else { + ggml_backend_tensor_get(tensor, data, offset, size); + } +#else + ggml_backend_tensor_get(tensor, data, offset, size); +#endif +} + +__STATIC_INLINE__ float ggml_backend_tensor_get_f32(ggml_tensor* tensor) { + GGML_ASSERT(tensor->type == GGML_TYPE_F32 || tensor->type == GGML_TYPE_F16); + float value; + if (tensor->type == GGML_TYPE_F32) { + ggml_backend_tensor_get(tensor, &value, 0, sizeof(value)); + } else { // GGML_TYPE_F16 + ggml_fp16_t f16_value; + ggml_backend_tensor_get(tensor, &f16_value, 0, sizeof(f16_value)); + value = ggml_fp16_to_fp32(f16_value); + } + return value; +} + +__STATIC_INLINE__ struct ggml_tensor* vector_to_ggml_tensor(struct ggml_context* ctx, + const std::vector& vec) { + struct ggml_tensor* t = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, vec.size()); + memcpy(t->data, (const void*)vec.data(), ggml_nbytes(t)); + return t; +} + +__STATIC_INLINE__ struct ggml_tensor* vector_to_ggml_tensor_i32(struct ggml_context* ctx, + const std::vector& vec) { + struct ggml_tensor* t = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, vec.size()); + memcpy(t->data, (const void*)vec.data(), ggml_nbytes(t)); + return t; +} + +__STATIC_INLINE__ std::vector arange(float start, float end, float step = 1.f) { + std::vector result; + + for (float value = start; value < end; value += step) { + result.push_back(value); + } + + return result; +} + +// Ref: https://github.com/CompVis/stable-diffusion/blob/main/ldm/modules/diffusionmodules/util.py#L151 +__STATIC_INLINE__ std::vector timestep_embedding(std::vector timesteps, + int dim, + int max_period = 10000) { + // timesteps: [N,] + // embedding: [N, dim] + size_t N = timesteps.size(); + int 
acutual_dim = dim; + if (dim % 2 != 0) { + acutual_dim = dim + 1; + } + std::vector embedding(N * acutual_dim, 0.f); + int half = dim / 2; + std::vector freqs(half); + for (int i = 0; i < half; ++i) { + freqs[i] = (float)std::exp(-std::log(max_period) * i / half); + } + for (int i = 0; i < N; ++i) { + for (int j = 0; j < half; ++j) { + float arg = timesteps[i] * freqs[j]; + embedding[i * acutual_dim + j] = std::cos(arg); + embedding[i * acutual_dim + j + half] = std::sin(arg); + } + } + return embedding; +} + +__STATIC_INLINE__ void set_timestep_embedding(std::vector timesteps, + struct ggml_tensor* embedding, + int dim, + int max_period = 10000) { + std::vector embedding_vec = timestep_embedding(timesteps, dim, max_period); + memcpy(((char*)embedding->data), ((char*)embedding_vec.data()), ggml_nbytes(embedding)); +} + +__STATIC_INLINE__ struct ggml_tensor* new_timestep_embedding(struct ggml_context* ctx, + std::vector timesteps, + int dim, + int max_period = 10000) { + // timesteps: [N,] + // embedding: [N, dim] + std::vector embedding_vec = timestep_embedding(timesteps, dim, max_period); + int acutual_dim = dim; + if (dim % 2 != 0) { + acutual_dim = dim + 1; + } + struct ggml_tensor* embedding = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, acutual_dim, timesteps.size()); + if (embedding->data != NULL) { + memcpy(((char*)embedding->data), ((char*)embedding_vec.data()), ggml_nbytes(embedding)); + } else { + ggml_backend_tensor_set(embedding, embedding_vec.data(), 0, ggml_nbytes(embedding)); + } + return embedding; +} + +__STATIC_INLINE__ struct ggml_tensor* ggml_nn_timestep_embedding( + struct ggml_context* ctx, + struct ggml_tensor* timesteps, + int dim, + int max_period = 10000, + float time_factor = 1.0f) { + timesteps = ggml_scale(ctx, timesteps, time_factor); + return ggml_timestep_embedding(ctx, timesteps, dim, max_period); +} + +__STATIC_INLINE__ size_t ggml_tensor_num(ggml_context* ctx) { + size_t num = 0; + for (ggml_tensor* t = ggml_get_first_tensor(ctx); t != 
nullptr; t = ggml_get_next_tensor(ctx, t)) { + num++; + } + return num; +} + +/* SDXL with LoRA requires more space */ +#define MAX_PARAMS_TENSOR_NUM 32768 +#define MAX_GRAPH_SIZE 32768 + +struct GGMLRunner { +protected: + typedef std::function get_graph_cb_t; + + struct ggml_context* params_ctx = NULL; + ggml_backend_buffer_t params_buffer = NULL; + + struct ggml_context* compute_ctx = NULL; + struct ggml_gallocr* compute_allocr = NULL; + + std::map backend_tensor_data_map; + + ggml_backend_t backend = NULL; + + void alloc_params_ctx() { + struct ggml_init_params params; + params.mem_size = static_cast(MAX_PARAMS_TENSOR_NUM * ggml_tensor_overhead()); + params.mem_buffer = NULL; + params.no_alloc = true; + + params_ctx = ggml_init(params); + GGML_ASSERT(params_ctx != NULL); + } + + void free_params_ctx() { + if (params_ctx != NULL) { + ggml_free(params_ctx); + params_ctx = NULL; + } + } + + void alloc_compute_ctx() { + struct ggml_init_params params; + params.mem_size = static_cast(ggml_tensor_overhead() * MAX_GRAPH_SIZE + ggml_graph_overhead()); + params.mem_buffer = NULL; + params.no_alloc = true; + + compute_ctx = ggml_init(params); + GGML_ASSERT(compute_ctx != NULL); + } + + void free_compute_ctx() { + if (compute_ctx != NULL) { + ggml_free(compute_ctx); + compute_ctx = NULL; + } + } + + bool alloc_compute_buffer(get_graph_cb_t get_graph) { + if (compute_allocr != NULL) { + return true; + } + reset_compute_ctx(); + struct ggml_cgraph* gf = get_graph(); + backend_tensor_data_map.clear(); + compute_allocr = ggml_gallocr_new(ggml_backend_get_default_buffer_type(backend)); + + if (!ggml_gallocr_reserve(compute_allocr, gf)) { + // failed to allocate the compute buffer + LOG_ERROR("%s: failed to allocate the compute buffer\n", get_desc().c_str()); + free_compute_buffer(); + return false; + } + + // compute the required memory + size_t compute_buffer_size = ggml_gallocr_get_buffer_size(compute_allocr, 0); + LOG_DEBUG("%s compute buffer size: %.2f MB(%s)", + 
get_desc().c_str(), + compute_buffer_size / 1024.0 / 1024.0, + ggml_backend_is_cpu(backend) ? "RAM" : "VRAM"); + return true; + } + + void cpy_data_to_backend_tensor() { + for (auto& kv : backend_tensor_data_map) { + auto tensor = kv.first; + auto data = kv.second; + + ggml_backend_tensor_set(tensor, data, 0, ggml_nbytes(tensor)); + } + + backend_tensor_data_map.clear(); + } + +public: + virtual std::string get_desc() = 0; + + GGMLRunner(ggml_backend_t backend) + : backend(backend) { + alloc_params_ctx(); + } + + virtual ~GGMLRunner() { + free_params_buffer(); + free_compute_buffer(); + free_params_ctx(); + free_compute_ctx(); + } + + void reset_compute_ctx() { + free_compute_ctx(); + alloc_compute_ctx(); + } + + bool alloc_params_buffer() { + size_t num_tensors = ggml_tensor_num(params_ctx); + params_buffer = ggml_backend_alloc_ctx_tensors(params_ctx, backend); + if (params_buffer == NULL) { + LOG_ERROR("%s alloc params backend buffer failed, num_tensors = %i", + get_desc().c_str(), + num_tensors); + return false; + } + size_t params_buffer_size = ggml_backend_buffer_get_size(params_buffer); + LOG_DEBUG("%s params backend buffer size = % 6.2f MB(%s) (%i tensors)", + get_desc().c_str(), + params_buffer_size / (1024.0 * 1024.0), + ggml_backend_is_cpu(backend) ? "RAM" : "VRAM", + num_tensors); + // printf("%s params backend buffer size = % 6.2f MB(%s) (%i tensors)\n", + // get_desc().c_str(), + // params_buffer_size / (1024.0 * 1024.0), + // ggml_backend_is_cpu(backend) ? 
"RAM" : "VRAM", + // num_tensors); + return true; + } + + void free_params_buffer() { + if (params_buffer != NULL) { + ggml_backend_buffer_free(params_buffer); + params_buffer = NULL; + } + } + + size_t get_params_buffer_size() { + if (params_buffer != NULL) { + return ggml_backend_buffer_get_size(params_buffer); + } + return 0; + } + + void free_compute_buffer() { + if (compute_allocr != NULL) { + ggml_gallocr_free(compute_allocr); + compute_allocr = NULL; + } + } + + // do copy after alloc graph + void set_backend_tensor_data(struct ggml_tensor* tensor, const void* data) { + backend_tensor_data_map[tensor] = data; + } + + struct ggml_tensor* to_backend(struct ggml_tensor* tensor) { + GGML_ASSERT(compute_ctx != NULL); + if (tensor == NULL) { + return NULL; + } + // it's performing a compute, check if backend isn't cpu + if (!ggml_backend_is_cpu(backend) && (tensor->buffer == NULL || ggml_backend_buffer_is_host(tensor->buffer))) { + // pass input tensors to gpu memory + auto backend_tensor = ggml_dup_tensor(compute_ctx, tensor); + + set_backend_tensor_data(backend_tensor, tensor->data); + return backend_tensor; + } else { + return tensor; + } + } + + void compute(get_graph_cb_t get_graph, + int n_threads, + bool free_compute_buffer_immediately = true, + struct ggml_tensor** output = NULL, + struct ggml_context* output_ctx = NULL) { + alloc_compute_buffer(get_graph); + reset_compute_ctx(); + struct ggml_cgraph* gf = get_graph(); + GGML_ASSERT(ggml_gallocr_alloc_graph(compute_allocr, gf)); + cpy_data_to_backend_tensor(); + if (ggml_backend_is_cpu(backend)) { + ggml_backend_cpu_set_n_threads(backend, n_threads); + } + + ggml_backend_graph_compute(backend, gf); +#ifdef GGML_PERF + ggml_graph_print(gf); +#endif + if (output != NULL) { + auto result = ggml_graph_node(gf, -1); + if (*output == NULL && output_ctx != NULL) { + *output = ggml_dup_tensor(output_ctx, result); + } + if (*output != NULL) { + ggml_backend_tensor_get_and_sync(backend, result, (*output)->data, 0, 
ggml_nbytes(*output)); + } + } + + if (free_compute_buffer_immediately) { + free_compute_buffer(); + } + } +}; + +class GGMLBlock { +protected: + typedef std::unordered_map ParameterMap; + typedef std::unordered_map> GGMLBlockMap; + GGMLBlockMap blocks; + ParameterMap params; + + void init_blocks(struct ggml_context* ctx, std::map& tensor_types, const std::string prefix = "") { + for (auto& pair : blocks) { + auto& block = pair.second; + block->init(ctx, tensor_types, prefix + pair.first); + } + } + + virtual void init_params(struct ggml_context* ctx, std::map& tensor_types, const std::string prefix = "") {} + +public: + void init(struct ggml_context* ctx, std::map& tensor_types, std::string prefix = "") { + if (prefix.size() > 0) { + prefix = prefix + "."; + } + init_blocks(ctx, tensor_types, prefix); + init_params(ctx, tensor_types, prefix); + } + + size_t get_params_num() { + size_t num_tensors = params.size(); + for (auto& pair : blocks) { + auto& block = pair.second; + + num_tensors += block->get_params_num(); + } + return num_tensors; + }; + + size_t get_params_mem_size() { + size_t mem_size = 0; + for (auto& pair : blocks) { + auto& block = pair.second; + + mem_size += block->get_params_mem_size(); + } + + for (auto& pair : params) { + mem_size += ggml_nbytes(pair.second); + } + + return mem_size; + } + + void get_param_tensors(std::map& tensors, std::string prefix = "") { + if (prefix.size() > 0) { + prefix = prefix + "."; + } + for (auto& pair : blocks) { + auto& block = pair.second; + block->get_param_tensors(tensors, prefix + pair.first); + } + + for (auto& pair : params) { + struct ggml_tensor* param = pair.second; + tensors[prefix + pair.first] = pair.second; + } + } +}; + +class UnaryBlock : public GGMLBlock { +public: + virtual struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) = 0; +}; + +class Linear : public UnaryBlock { +protected: + int64_t in_features; + int64_t out_features; + bool bias; + bool force_f32; + + void 
init_params(struct ggml_context* ctx, std::map& tensor_types, const std::string prefix = "") { + enum ggml_type wtype = (tensor_types.find(prefix + "weight") != tensor_types.end()) ? tensor_types[prefix + "weight"] : GGML_TYPE_F32; + if (in_features % ggml_blck_size(wtype) != 0 || force_f32) { + wtype = GGML_TYPE_F32; + } + params["weight"] = ggml_new_tensor_2d(ctx, wtype, in_features, out_features); + if (bias) { + enum ggml_type wtype = GGML_TYPE_F32; //(tensor_types.ypes.find(prefix + "bias") != tensor_types.end()) ? tensor_types[prefix + "bias"] : GGML_TYPE_F32; + params["bias"] = ggml_new_tensor_1d(ctx, wtype, out_features); + } + } + +public: + Linear(int64_t in_features, + int64_t out_features, + bool bias = true, + bool force_f32 = false) + : in_features(in_features), + out_features(out_features), + bias(bias), + force_f32(force_f32) {} + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + struct ggml_tensor* w = params["weight"]; + struct ggml_tensor* b = NULL; + if (bias) { + b = params["bias"]; + } + return ggml_nn_linear(ctx, x, w, b); + } +}; + +class Embedding : public UnaryBlock { +protected: + int64_t embedding_dim; + int64_t num_embeddings; + void init_params(struct ggml_context* ctx, std::map& tensor_types, const std::string prefix = "") { + enum ggml_type wtype = (tensor_types.find(prefix + "weight") != tensor_types.end()) ? tensor_types[prefix + "weight"] : GGML_TYPE_F32; + params["weight"] = ggml_new_tensor_2d(ctx, wtype, embedding_dim, num_embeddings); + } + +public: + Embedding(int64_t num_embeddings, int64_t embedding_dim) + : embedding_dim(embedding_dim), + num_embeddings(num_embeddings) { + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* input_ids) { + // input_ids: [N, n_token] + auto weight = params["weight"]; + + // There are issues with ggml batch inference, so we are expanding it here first. 
+ // TODO: fix ggml batch inference + int64_t n = input_ids->ne[1]; + input_ids = ggml_reshape_1d(ctx, input_ids, input_ids->ne[0] * input_ids->ne[1]); + + input_ids = ggml_reshape_3d(ctx, input_ids, input_ids->ne[0], 1, input_ids->ne[1]); + auto embedding = ggml_get_rows(ctx, weight, input_ids); + embedding = ggml_reshape_3d(ctx, embedding, embedding->ne[0], embedding->ne[1] / n, n); + + // [N, n_token, embedding_dim] + return embedding; + } +}; + +class Conv2d : public UnaryBlock { +protected: + int64_t in_channels; + int64_t out_channels; + std::pair kernel_size; + std::pair stride; + std::pair padding; + std::pair dilation; + bool bias; + + void init_params(struct ggml_context* ctx, std::map& tensor_types, const std::string prefix = "") { + enum ggml_type wtype = GGML_TYPE_F16; //(tensor_types.find(prefix + "weight") != tensor_types.end()) ? tensor_types[prefix + "weight"] : GGML_TYPE_F16; + params["weight"] = ggml_new_tensor_4d(ctx, wtype, kernel_size.second, kernel_size.first, in_channels, out_channels); + if (bias) { + enum ggml_type wtype = GGML_TYPE_F32; // (tensor_types.find(prefix + "bias") != tensor_types.end()) ? 
tensor_types[prefix + "bias"] : GGML_TYPE_F32; + params["bias"] = ggml_new_tensor_1d(ctx, wtype, out_channels); + } + } + +public: + Conv2d(int64_t in_channels, + int64_t out_channels, + std::pair kernel_size, + std::pair stride = {1, 1}, + std::pair padding = {0, 0}, + std::pair dilation = {1, 1}, + bool bias = true) + : in_channels(in_channels), + out_channels(out_channels), + kernel_size(kernel_size), + stride(stride), + padding(padding), + dilation(dilation), + bias(bias) {} + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + struct ggml_tensor* w = params["weight"]; + struct ggml_tensor* b = NULL; + if (bias) { + b = params["bias"]; + } + return ggml_nn_conv_2d(ctx, x, w, b, stride.second, stride.first, padding.second, padding.first, dilation.second, dilation.first); + } +}; + +class Conv3dnx1x1 : public UnaryBlock { +protected: + int64_t in_channels; + int64_t out_channels; + int64_t kernel_size; + int64_t stride; + int64_t padding; + int64_t dilation; + bool bias; + + void init_params(struct ggml_context* ctx, std::map& tensor_types, const std::string prefix = "") { + enum ggml_type wtype = GGML_TYPE_F16; //(tensor_types.find(prefix + "weight") != tensor_types.end()) ? tensor_types[prefix + "weight"] : GGML_TYPE_F16; + params["weight"] = ggml_new_tensor_4d(ctx, wtype, 1, kernel_size, in_channels, out_channels); // 5d => 4d + if (bias) { + enum ggml_type wtype = GGML_TYPE_F32; //(tensor_types.find(prefix + "bias") != tensor_types.end()) ? 
tensor_types[prefix + "bias"] : GGML_TYPE_F32; + params["bias"] = ggml_new_tensor_1d(ctx, wtype, out_channels); + } + } + +public: + Conv3dnx1x1(int64_t in_channels, + int64_t out_channels, + int64_t kernel_size, + int64_t stride = 1, + int64_t padding = 0, + int64_t dilation = 1, + bool bias = true) + : in_channels(in_channels), + out_channels(out_channels), + kernel_size(kernel_size), + stride(stride), + padding(padding), + dilation(dilation), + bias(bias) {} + + // x: [N, IC, ID, IH*IW] + // result: [N, OC, OD, OH*OW] + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + struct ggml_tensor* w = params["weight"]; + struct ggml_tensor* b = NULL; + if (bias) { + b = params["bias"]; + } + return ggml_nn_conv_3d_nx1x1(ctx, x, w, b, stride, padding, dilation); + } +}; + +class LayerNorm : public UnaryBlock { +protected: + int64_t normalized_shape; + float eps; + bool elementwise_affine; + bool bias; + + void init_params(struct ggml_context* ctx, std::map& tensor_types, const std::string prefix = "") { + if (elementwise_affine) { + enum ggml_type wtype = GGML_TYPE_F32; //(tensor_types.ypes.find(prefix + "weight") != tensor_types.end()) ? tensor_types[prefix + "weight"] : GGML_TYPE_F32; + params["weight"] = ggml_new_tensor_1d(ctx, wtype, normalized_shape); + if (bias) { + enum ggml_type wtype = GGML_TYPE_F32; //(tensor_types.ypes.find(prefix + "bias") != tensor_types.end()) ? 
tensor_types[prefix + "bias"] : GGML_TYPE_F32; + params["bias"] = ggml_new_tensor_1d(ctx, wtype, normalized_shape); + } + } + } + +public: + LayerNorm(int64_t normalized_shape, + float eps = 1e-05f, + bool elementwise_affine = true, + bool bias = true) + : normalized_shape(normalized_shape), + eps(eps), + elementwise_affine(elementwise_affine), + bias(bias) {} + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + struct ggml_tensor* w = NULL; + struct ggml_tensor* b = NULL; + + if (elementwise_affine) { + w = params["weight"]; + if (bias) { + b = params["bias"]; + } + } + return ggml_nn_layer_norm(ctx, x, w, b, eps); + } +}; + +class GroupNorm : public GGMLBlock { +protected: + int64_t num_groups; + int64_t num_channels; + float eps; + bool affine; + + void init_params(struct ggml_context* ctx, std::map& tensor_types, const std::string prefix = "") { + if (affine) { + enum ggml_type wtype = GGML_TYPE_F32; //(tensor_types.find(prefix + "weight") != tensor_types.end()) ? tensor_types[prefix + "weight"] : GGML_TYPE_F32; + enum ggml_type bias_wtype = GGML_TYPE_F32; //(tensor_types.find(prefix + "bias") != tensor_types.end()) ? 
tensor_types[prefix + "bias"] : GGML_TYPE_F32; + params["weight"] = ggml_new_tensor_1d(ctx, wtype, num_channels); + params["bias"] = ggml_new_tensor_1d(ctx, bias_wtype, num_channels); + } + } + +public: + GroupNorm(int64_t num_groups, + int64_t num_channels, + float eps = 1e-05f, + bool affine = true) + : num_groups(num_groups), + num_channels(num_channels), + eps(eps), + affine(affine) {} + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + struct ggml_tensor* w = NULL; + struct ggml_tensor* b = NULL; + if (affine) { + w = params["weight"]; + b = params["bias"]; + } + return ggml_nn_group_norm(ctx, x, w, b, num_groups); + } +}; + +class GroupNorm32 : public GroupNorm { +public: + GroupNorm32(int64_t num_channels) + : GroupNorm(32, num_channels, 1e-06f) {} +}; + +class MultiheadAttention : public GGMLBlock { +protected: + int64_t embed_dim; + int64_t n_head; + std::string q_proj_name; + std::string k_proj_name; + std::string v_proj_name; + std::string out_proj_name; + +public: + MultiheadAttention(int64_t embed_dim, + int64_t n_head, + bool qkv_proj_bias = true, + bool out_proj_bias = true, + std::string q_proj_name = "q_proj", + std::string k_proj_name = "k_proj", + std::string v_proj_name = "v_proj", + std::string out_proj_name = "out_proj") + : embed_dim(embed_dim), + n_head(n_head), + q_proj_name(q_proj_name), + k_proj_name(k_proj_name), + v_proj_name(v_proj_name), + out_proj_name(out_proj_name) { + blocks[q_proj_name] = std::shared_ptr(new Linear(embed_dim, embed_dim, qkv_proj_bias)); + blocks[k_proj_name] = std::shared_ptr(new Linear(embed_dim, embed_dim, qkv_proj_bias)); + blocks[v_proj_name] = std::shared_ptr(new Linear(embed_dim, embed_dim, qkv_proj_bias)); + blocks[out_proj_name] = std::shared_ptr(new Linear(embed_dim, embed_dim, out_proj_bias)); + } + + // x: [N, n_token, embed_dim] + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x, bool mask = false) { + auto q_proj = 
std::dynamic_pointer_cast(blocks[q_proj_name]); + auto k_proj = std::dynamic_pointer_cast(blocks[k_proj_name]); + auto v_proj = std::dynamic_pointer_cast(blocks[v_proj_name]); + auto out_proj = std::dynamic_pointer_cast(blocks[out_proj_name]); + + struct ggml_tensor* q = q_proj->forward(ctx, x); + struct ggml_tensor* k = k_proj->forward(ctx, x); + struct ggml_tensor* v = v_proj->forward(ctx, x); + + x = ggml_nn_attention_ext(ctx, q, k, v, n_head, NULL, mask); // [N, n_token, embed_dim] + + x = out_proj->forward(ctx, x); // [N, n_token, embed_dim] + return x; + } +}; + +#endif // __GGML_EXTEND__HPP__ diff --git a/gits_noise.inl b/gits_noise.inl new file mode 100644 index 000000000..7a10ff76f --- /dev/null +++ b/gits_noise.inl @@ -0,0 +1,349 @@ +#ifndef GITS_NOISE_INL +#define GITS_NOISE_INL + +const std::vector> GITS_NOISE_0_80 = { + { 14.61464119f, 7.49001646f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 6.77309084f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 3.07277966f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 5.85520077f, 2.05039096f, 0.02916753f }, + { 14.61464119f, 12.23089790f, 8.75849152f, 7.49001646f, 5.85520077f, 2.05039096f, 0.02916753f }, + { 14.61464119f, 12.23089790f, 8.75849152f, 7.49001646f, 5.85520077f, 3.07277966f, 1.56271636f, 0.02916753f }, + { 14.61464119f, 12.96784878f, 11.54541874f, 8.75849152f, 7.49001646f, 5.85520077f, 3.07277966f, 1.56271636f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.23089790f, 10.90732002f, 8.75849152f, 7.49001646f, 5.85520077f, 3.07277966f, 1.56271636f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 10.90732002f, 8.75849152f, 7.49001646f, 5.85520077f, 3.07277966f, 1.56271636f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 10.90732002f, 9.24142551f, 8.30717278f, 7.49001646f, 5.85520077f, 3.07277966f, 1.56271636f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 10.90732002f, 9.24142551f, 
8.30717278f, 7.49001646f, 6.14220476f, 4.86714602f, 3.07277966f, 1.56271636f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 11.54541874f, 10.31284904f, 9.24142551f, 8.30717278f, 7.49001646f, 6.14220476f, 4.86714602f, 3.07277966f, 1.56271636f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 11.54541874f, 10.90732002f, 10.31284904f, 9.24142551f, 8.30717278f, 7.49001646f, 6.14220476f, 4.86714602f, 3.07277966f, 1.56271636f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 11.54541874f, 10.90732002f, 10.31284904f, 9.24142551f, 8.75849152f, 8.30717278f, 7.49001646f, 6.14220476f, 4.86714602f, 3.07277966f, 1.56271636f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 11.54541874f, 10.90732002f, 10.31284904f, 9.75859547f, 9.24142551f, 8.75849152f, 8.30717278f, 7.49001646f, 6.14220476f, 4.86714602f, 3.19567990f, 1.98035145f, 0.86115354f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 11.54541874f, 10.90732002f, 10.31284904f, 9.75859547f, 9.24142551f, 8.75849152f, 8.30717278f, 7.49001646f, 6.14220476f, 4.86714602f, 3.19567990f, 1.98035145f, 0.86115354f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 11.54541874f, 10.90732002f, 10.31284904f, 9.75859547f, 9.24142551f, 8.75849152f, 8.30717278f, 7.88507891f, 7.49001646f, 6.77309084f, 5.85520077f, 4.65472794f, 3.07277966f, 1.84880662f, 0.83188516f, 0.02916753f } +}; + +const std::vector> GITS_NOISE_0_85 = { + { 14.61464119f, 7.49001646f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 1.84880662f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 6.77309084f, 1.56271636f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.11996698f, 3.07277966f, 1.24153244f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 5.09240818f, 2.84484982f, 0.95350921f, 0.02916753f }, + { 14.61464119f, 12.23089790f, 8.75849152f, 7.49001646f, 5.09240818f, 2.84484982f, 0.95350921f, 0.02916753f 
}, + { 14.61464119f, 12.23089790f, 8.75849152f, 7.49001646f, 5.58536053f, 3.19567990f, 1.84880662f, 0.803307f, 0.02916753f }, + { 14.61464119f, 12.96784878f, 11.54541874f, 8.75849152f, 7.49001646f, 5.58536053f, 3.19567990f, 1.84880662f, 0.803307f, 0.02916753f }, + { 14.61464119f, 12.96784878f, 11.54541874f, 8.75849152f, 7.49001646f, 6.14220476f, 4.65472794f, 3.07277966f, 1.84880662f, 0.803307f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.23089790f, 10.90732002f, 8.75849152f, 7.49001646f, 6.14220476f, 4.65472794f, 3.07277966f, 1.84880662f, 0.803307f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.23089790f, 10.90732002f, 9.24142551f, 8.30717278f, 7.49001646f, 6.14220476f, 4.65472794f, 3.07277966f, 1.84880662f, 0.803307f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 10.90732002f, 9.24142551f, 8.30717278f, 7.49001646f, 6.14220476f, 4.65472794f, 3.07277966f, 1.84880662f, 0.803307f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 11.54541874f, 10.31284904f, 9.24142551f, 8.30717278f, 7.49001646f, 6.14220476f, 4.65472794f, 3.07277966f, 1.84880662f, 0.803307f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 11.54541874f, 10.31284904f, 9.24142551f, 8.30717278f, 7.49001646f, 6.14220476f, 4.86714602f, 3.60512662f, 2.63833880f, 1.56271636f, 0.72133851f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 11.54541874f, 10.31284904f, 9.24142551f, 8.30717278f, 7.49001646f, 6.77309084f, 5.85520077f, 4.65472794f, 3.46139455f, 2.45070267f, 1.56271636f, 0.72133851f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 11.54541874f, 10.31284904f, 9.24142551f, 8.75849152f, 8.30717278f, 7.49001646f, 6.77309084f, 5.85520077f, 4.65472794f, 3.46139455f, 2.45070267f, 1.56271636f, 0.72133851f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 11.54541874f, 10.90732002f, 10.31284904f, 9.24142551f, 8.75849152f, 8.30717278f, 
7.49001646f, 6.77309084f, 5.85520077f, 4.65472794f, 3.46139455f, 2.45070267f, 1.56271636f, 0.72133851f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 11.54541874f, 10.90732002f, 10.31284904f, 9.75859547f, 9.24142551f, 8.75849152f, 8.30717278f, 7.49001646f, 6.77309084f, 5.85520077f, 4.65472794f, 3.46139455f, 2.45070267f, 1.56271636f, 0.72133851f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 11.54541874f, 10.90732002f, 10.31284904f, 9.75859547f, 9.24142551f, 8.75849152f, 8.30717278f, 7.88507891f, 7.49001646f, 6.77309084f, 5.85520077f, 4.65472794f, 3.46139455f, 2.45070267f, 1.56271636f, 0.72133851f, 0.02916753f } +}; + +const std::vector> GITS_NOISE_0_90 = { + { 14.61464119f, 6.77309084f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 1.56271636f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 3.07277966f, 0.95350921f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 2.54230714f, 0.89115214f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 4.86714602f, 2.54230714f, 0.89115214f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 5.09240818f, 3.07277966f, 1.61558151f, 0.69515091f, 0.02916753f }, + { 14.61464119f, 12.23089790f, 8.75849152f, 7.11996698f, 4.86714602f, 3.07277966f, 1.61558151f, 0.69515091f, 0.02916753f }, + { 14.61464119f, 12.23089790f, 8.75849152f, 7.49001646f, 5.85520077f, 4.45427561f, 2.95596409f, 1.61558151f, 0.69515091f, 0.02916753f }, + { 14.61464119f, 12.23089790f, 8.75849152f, 7.49001646f, 5.85520077f, 4.45427561f, 3.19567990f, 2.19988537f, 1.24153244f, 0.57119018f, 0.02916753f }, + { 14.61464119f, 12.96784878f, 10.90732002f, 8.75849152f, 7.49001646f, 5.85520077f, 4.45427561f, 3.19567990f, 2.19988537f, 1.24153244f, 0.57119018f, 0.02916753f }, + { 14.61464119f, 12.96784878f, 11.54541874f, 9.24142551f, 8.30717278f, 7.49001646f, 5.85520077f, 4.45427561f, 3.19567990f, 2.19988537f, 1.24153244f, 0.57119018f, 0.02916753f }, + { 14.61464119f, 12.96784878f, 11.54541874f, 
9.24142551f, 8.30717278f, 7.49001646f, 6.14220476f, 4.86714602f, 3.75677586f, 2.84484982f, 1.84880662f, 1.08895338f, 0.52423614f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.23089790f, 10.90732002f, 9.24142551f, 8.30717278f, 7.49001646f, 6.14220476f, 4.86714602f, 3.75677586f, 2.84484982f, 1.84880662f, 1.08895338f, 0.52423614f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.23089790f, 10.90732002f, 9.24142551f, 8.30717278f, 7.49001646f, 6.44769001f, 5.58536053f, 4.45427561f, 3.32507086f, 2.45070267f, 1.61558151f, 0.95350921f, 0.45573691f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 10.90732002f, 9.24142551f, 8.30717278f, 7.49001646f, 6.44769001f, 5.58536053f, 4.45427561f, 3.32507086f, 2.45070267f, 1.61558151f, 0.95350921f, 0.45573691f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 10.90732002f, 9.24142551f, 8.30717278f, 7.49001646f, 6.77309084f, 5.85520077f, 4.86714602f, 3.91689563f, 3.07277966f, 2.27973175f, 1.56271636f, 0.95350921f, 0.45573691f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 11.54541874f, 10.31284904f, 9.24142551f, 8.30717278f, 7.49001646f, 6.77309084f, 5.85520077f, 4.86714602f, 3.91689563f, 3.07277966f, 2.27973175f, 1.56271636f, 0.95350921f, 0.45573691f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 11.54541874f, 10.31284904f, 9.24142551f, 8.75849152f, 8.30717278f, 7.49001646f, 6.77309084f, 5.85520077f, 4.86714602f, 3.91689563f, 3.07277966f, 2.27973175f, 1.56271636f, 0.95350921f, 0.45573691f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.96784878f, 12.23089790f, 11.54541874f, 10.31284904f, 9.24142551f, 8.75849152f, 8.30717278f, 7.49001646f, 6.77309084f, 5.85520077f, 5.09240818f, 4.45427561f, 3.60512662f, 2.95596409f, 2.19988537f, 1.51179266f, 0.89115214f, 0.43325692f, 0.02916753f } +}; + +const std::vector> GITS_NOISE_0_95 = { + { 14.61464119f, 6.77309084f, 0.02916753f }, + { 14.61464119f, 6.77309084f, 1.56271636f, 
0.02916753f }, + { 14.61464119f, 7.49001646f, 2.84484982f, 0.89115214f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 2.36326075f, 0.803307f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 2.95596409f, 1.56271636f, 0.64427125f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 4.86714602f, 2.95596409f, 1.56271636f, 0.64427125f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 4.86714602f, 3.07277966f, 1.91321158f, 1.08895338f, 0.50118381f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 5.85520077f, 4.45427561f, 3.07277966f, 1.91321158f, 1.08895338f, 0.50118381f, 0.02916753f }, + { 14.61464119f, 12.23089790f, 8.75849152f, 7.49001646f, 5.85520077f, 4.45427561f, 3.07277966f, 1.91321158f, 1.08895338f, 0.50118381f, 0.02916753f }, + { 14.61464119f, 12.23089790f, 8.75849152f, 7.49001646f, 5.85520077f, 4.45427561f, 3.19567990f, 2.19988537f, 1.41535246f, 0.803307f, 0.38853383f, 0.02916753f }, + { 14.61464119f, 12.23089790f, 8.75849152f, 7.49001646f, 5.85520077f, 4.65472794f, 3.46139455f, 2.63833880f, 1.84880662f, 1.24153244f, 0.72133851f, 0.34370604f, 0.02916753f }, + { 14.61464119f, 12.96784878f, 10.90732002f, 8.75849152f, 7.49001646f, 5.85520077f, 4.65472794f, 3.46139455f, 2.63833880f, 1.84880662f, 1.24153244f, 0.72133851f, 0.34370604f, 0.02916753f }, + { 14.61464119f, 12.96784878f, 10.90732002f, 8.75849152f, 7.49001646f, 6.14220476f, 4.86714602f, 3.75677586f, 2.95596409f, 2.19988537f, 1.56271636f, 1.05362725f, 0.64427125f, 0.32104823f, 0.02916753f }, + { 14.61464119f, 12.96784878f, 10.90732002f, 8.75849152f, 7.49001646f, 6.44769001f, 5.58536053f, 4.65472794f, 3.60512662f, 2.95596409f, 2.19988537f, 1.56271636f, 1.05362725f, 0.64427125f, 0.32104823f, 0.02916753f }, + { 14.61464119f, 12.96784878f, 11.54541874f, 9.24142551f, 8.30717278f, 7.49001646f, 6.44769001f, 5.58536053f, 4.65472794f, 3.60512662f, 2.95596409f, 2.19988537f, 1.56271636f, 1.05362725f, 0.64427125f, 0.32104823f, 0.02916753f }, + { 14.61464119f, 
12.96784878f, 11.54541874f, 9.24142551f, 8.30717278f, 7.49001646f, 6.44769001f, 5.58536053f, 4.65472794f, 3.75677586f, 3.07277966f, 2.45070267f, 1.78698075f, 1.24153244f, 0.83188516f, 0.50118381f, 0.22545385f, 0.02916753f }, + { 14.61464119f, 12.96784878f, 11.54541874f, 9.24142551f, 8.30717278f, 7.49001646f, 6.77309084f, 5.85520077f, 5.09240818f, 4.45427561f, 3.60512662f, 2.95596409f, 2.36326075f, 1.72759056f, 1.24153244f, 0.83188516f, 0.50118381f, 0.22545385f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.23089790f, 10.90732002f, 9.24142551f, 8.30717278f, 7.49001646f, 6.77309084f, 5.85520077f, 5.09240818f, 4.45427561f, 3.60512662f, 2.95596409f, 2.36326075f, 1.72759056f, 1.24153244f, 0.83188516f, 0.50118381f, 0.22545385f, 0.02916753f }, + { 14.61464119f, 13.76078796f, 12.23089790f, 10.90732002f, 9.24142551f, 8.30717278f, 7.49001646f, 6.77309084f, 5.85520077f, 5.09240818f, 4.45427561f, 3.75677586f, 3.07277966f, 2.45070267f, 1.91321158f, 1.46270394f, 1.05362725f, 0.72133851f, 0.43325692f, 0.19894916f, 0.02916753f } +}; + +const std::vector> GITS_NOISE_1_00 = { + { 14.61464119f, 1.56271636f, 0.02916753f }, + { 14.61464119f, 6.77309084f, 0.95350921f, 0.02916753f }, + { 14.61464119f, 6.77309084f, 2.36326075f, 0.803307f, 0.02916753f }, + { 14.61464119f, 7.11996698f, 3.07277966f, 1.56271636f, 0.59516323f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 2.84484982f, 1.41535246f, 0.57119018f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 2.84484982f, 1.61558151f, 0.86115354f, 0.38853383f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 4.86714602f, 2.84484982f, 1.61558151f, 0.86115354f, 0.38853383f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 4.86714602f, 3.07277966f, 1.98035145f, 1.24153244f, 0.72133851f, 0.34370604f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 5.85520077f, 4.45427561f, 3.07277966f, 1.98035145f, 1.24153244f, 0.72133851f, 0.34370604f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 
7.49001646f, 5.85520077f, 4.45427561f, 3.19567990f, 2.27973175f, 1.51179266f, 0.95350921f, 0.54755926f, 0.25053367f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 5.85520077f, 4.45427561f, 3.19567990f, 2.36326075f, 1.61558151f, 1.08895338f, 0.72133851f, 0.41087446f, 0.17026083f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 8.75849152f, 7.49001646f, 5.85520077f, 4.45427561f, 3.19567990f, 2.36326075f, 1.61558151f, 1.08895338f, 0.72133851f, 0.41087446f, 0.17026083f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 8.75849152f, 7.49001646f, 5.85520077f, 4.65472794f, 3.60512662f, 2.84484982f, 2.12350607f, 1.56271636f, 1.08895338f, 0.72133851f, 0.41087446f, 0.17026083f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 8.75849152f, 7.49001646f, 5.85520077f, 4.65472794f, 3.60512662f, 2.84484982f, 2.19988537f, 1.61558151f, 1.162866f, 0.803307f, 0.50118381f, 0.27464288f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 8.75849152f, 7.49001646f, 5.85520077f, 4.65472794f, 3.75677586f, 3.07277966f, 2.45070267f, 1.84880662f, 1.36964464f, 1.01931262f, 0.72133851f, 0.45573691f, 0.25053367f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 8.75849152f, 7.49001646f, 6.14220476f, 5.09240818f, 4.26497746f, 3.46139455f, 2.84484982f, 2.19988537f, 1.67050016f, 1.24153244f, 0.92192322f, 0.64427125f, 0.43325692f, 0.25053367f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 8.75849152f, 7.49001646f, 6.14220476f, 5.09240818f, 4.26497746f, 3.60512662f, 2.95596409f, 2.45070267f, 1.91321158f, 1.51179266f, 1.12534678f, 0.83188516f, 0.59516323f, 0.38853383f, 0.22545385f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 12.23089790f, 9.24142551f, 8.30717278f, 7.49001646f, 6.14220476f, 5.09240818f, 4.26497746f, 3.60512662f, 2.95596409f, 2.45070267f, 1.91321158f, 1.51179266f, 1.12534678f, 0.83188516f, 0.59516323f, 0.38853383f, 0.22545385f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 12.23089790f, 9.24142551f, 8.30717278f, 7.49001646f, 6.77309084f, 
5.85520077f, 5.09240818f, 4.26497746f, 3.60512662f, 2.95596409f, 2.45070267f, 1.91321158f, 1.51179266f, 1.12534678f, 0.83188516f, 0.59516323f, 0.38853383f, 0.22545385f, 0.09824532f, 0.02916753f } +}; + +const std::vector> GITS_NOISE_1_05 = { + { 14.61464119f, 0.95350921f, 0.02916753f }, + { 14.61464119f, 6.77309084f, 0.89115214f, 0.02916753f }, + { 14.61464119f, 6.77309084f, 2.05039096f, 0.72133851f, 0.02916753f }, + { 14.61464119f, 6.77309084f, 2.84484982f, 1.28281462f, 0.52423614f, 0.02916753f }, + { 14.61464119f, 6.77309084f, 3.07277966f, 1.61558151f, 0.803307f, 0.34370604f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 2.84484982f, 1.56271636f, 0.803307f, 0.34370604f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 2.84484982f, 1.61558151f, 0.95350921f, 0.52423614f, 0.22545385f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 3.07277966f, 1.98035145f, 1.24153244f, 0.74807048f, 0.41087446f, 0.17026083f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 3.19567990f, 2.27973175f, 1.51179266f, 0.95350921f, 0.59516323f, 0.34370604f, 0.13792117f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 5.09240818f, 3.46139455f, 2.45070267f, 1.61558151f, 1.08895338f, 0.72133851f, 0.45573691f, 0.25053367f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 5.09240818f, 3.46139455f, 2.45070267f, 1.61558151f, 1.08895338f, 0.72133851f, 0.45573691f, 0.25053367f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 5.85520077f, 4.45427561f, 3.19567990f, 2.36326075f, 1.61558151f, 1.08895338f, 0.72133851f, 0.45573691f, 0.25053367f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 5.85520077f, 4.45427561f, 3.19567990f, 2.45070267f, 1.72759056f, 1.24153244f, 0.86115354f, 0.59516323f, 0.38853383f, 0.22545385f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 5.85520077f, 4.65472794f, 3.60512662f, 2.84484982f, 2.19988537f, 1.61558151f, 1.162866f, 0.83188516f, 
0.59516323f, 0.38853383f, 0.22545385f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 5.85520077f, 4.65472794f, 3.60512662f, 2.84484982f, 2.19988537f, 1.67050016f, 1.28281462f, 0.95350921f, 0.72133851f, 0.52423614f, 0.34370604f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 5.85520077f, 4.65472794f, 3.60512662f, 2.95596409f, 2.36326075f, 1.84880662f, 1.41535246f, 1.08895338f, 0.83188516f, 0.61951244f, 0.45573691f, 0.32104823f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 5.85520077f, 4.65472794f, 3.60512662f, 2.95596409f, 2.45070267f, 1.91321158f, 1.51179266f, 1.20157266f, 0.95350921f, 0.74807048f, 0.57119018f, 0.43325692f, 0.29807833f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 8.30717278f, 7.11996698f, 5.85520077f, 4.65472794f, 3.60512662f, 2.95596409f, 2.45070267f, 1.91321158f, 1.51179266f, 1.20157266f, 0.95350921f, 0.74807048f, 0.57119018f, 0.43325692f, 0.29807833f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 8.30717278f, 7.11996698f, 5.85520077f, 4.65472794f, 3.60512662f, 2.95596409f, 2.45070267f, 1.98035145f, 1.61558151f, 1.32549286f, 1.08895338f, 0.86115354f, 0.69515091f, 0.54755926f, 0.41087446f, 0.29807833f, 0.19894916f, 0.09824532f, 0.02916753f } +}; + +const std::vector> GITS_NOISE_1_10 = { + { 14.61464119f, 0.89115214f, 0.02916753f }, + { 14.61464119f, 2.36326075f, 0.72133851f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 1.61558151f, 0.57119018f, 0.02916753f }, + { 14.61464119f, 6.77309084f, 2.45070267f, 1.08895338f, 0.45573691f, 0.02916753f }, + { 14.61464119f, 6.77309084f, 2.95596409f, 1.56271636f, 0.803307f, 0.34370604f, 0.02916753f }, + { 14.61464119f, 6.77309084f, 3.07277966f, 1.61558151f, 0.89115214f, 0.4783645f, 0.19894916f, 0.02916753f }, + { 14.61464119f, 6.77309084f, 3.07277966f, 1.84880662f, 1.08895338f, 0.64427125f, 0.34370604f, 0.13792117f, 0.02916753f }, + { 14.61464119f, 
7.49001646f, 4.86714602f, 2.84484982f, 1.61558151f, 0.95350921f, 0.54755926f, 0.27464288f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 2.95596409f, 1.91321158f, 1.24153244f, 0.803307f, 0.4783645f, 0.25053367f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 3.07277966f, 2.05039096f, 1.41535246f, 0.95350921f, 0.64427125f, 0.41087446f, 0.22545385f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 3.19567990f, 2.27973175f, 1.61558151f, 1.12534678f, 0.803307f, 0.54755926f, 0.36617002f, 0.22545385f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 3.32507086f, 2.45070267f, 1.72759056f, 1.24153244f, 0.89115214f, 0.64427125f, 0.45573691f, 0.32104823f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 5.09240818f, 3.60512662f, 2.84484982f, 2.05039096f, 1.51179266f, 1.08895338f, 0.803307f, 0.59516323f, 0.43325692f, 0.29807833f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 5.09240818f, 3.60512662f, 2.84484982f, 2.12350607f, 1.61558151f, 1.24153244f, 0.95350921f, 0.72133851f, 0.54755926f, 0.41087446f, 0.29807833f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 5.85520077f, 4.45427561f, 3.19567990f, 2.45070267f, 1.84880662f, 1.41535246f, 1.08895338f, 0.83188516f, 0.64427125f, 0.50118381f, 0.36617002f, 0.25053367f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 5.85520077f, 4.45427561f, 3.19567990f, 2.45070267f, 1.91321158f, 1.51179266f, 1.20157266f, 0.95350921f, 0.74807048f, 0.59516323f, 0.45573691f, 0.34370604f, 0.25053367f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 5.85520077f, 4.45427561f, 3.46139455f, 2.84484982f, 2.19988537f, 1.72759056f, 1.36964464f, 1.08895338f, 0.86115354f, 0.69515091f, 0.54755926f, 0.43325692f, 0.34370604f, 0.25053367f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 5.85520077f, 
4.45427561f, 3.46139455f, 2.84484982f, 2.19988537f, 1.72759056f, 1.36964464f, 1.08895338f, 0.86115354f, 0.69515091f, 0.54755926f, 0.43325692f, 0.34370604f, 0.25053367f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 11.54541874f, 7.49001646f, 5.85520077f, 4.45427561f, 3.46139455f, 2.84484982f, 2.19988537f, 1.72759056f, 1.36964464f, 1.08895338f, 0.89115214f, 0.72133851f, 0.59516323f, 0.4783645f, 0.38853383f, 0.29807833f, 0.22545385f, 0.17026083f, 0.09824532f, 0.02916753f } +}; + +const std::vector> GITS_NOISE_1_15 = { + { 14.61464119f, 0.83188516f, 0.02916753f }, + { 14.61464119f, 1.84880662f, 0.59516323f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 1.56271636f, 0.52423614f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 1.91321158f, 0.83188516f, 0.34370604f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.45070267f, 1.24153244f, 0.59516323f, 0.25053367f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.84484982f, 1.51179266f, 0.803307f, 0.41087446f, 0.17026083f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.84484982f, 1.56271636f, 0.89115214f, 0.50118381f, 0.25053367f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 6.77309084f, 3.07277966f, 1.84880662f, 1.12534678f, 0.72133851f, 0.43325692f, 0.22545385f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 6.77309084f, 3.07277966f, 1.91321158f, 1.24153244f, 0.803307f, 0.52423614f, 0.34370604f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 2.95596409f, 1.91321158f, 1.24153244f, 0.803307f, 0.52423614f, 0.34370604f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 3.07277966f, 2.05039096f, 1.36964464f, 0.95350921f, 0.69515091f, 0.4783645f, 0.32104823f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 3.07277966f, 2.12350607f, 1.51179266f, 1.08895338f, 0.803307f, 0.59516323f, 0.43325692f, 0.29807833f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 3.07277966f, 
2.12350607f, 1.51179266f, 1.08895338f, 0.803307f, 0.59516323f, 0.45573691f, 0.34370604f, 0.25053367f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 3.07277966f, 2.19988537f, 1.61558151f, 1.24153244f, 0.95350921f, 0.74807048f, 0.59516323f, 0.45573691f, 0.34370604f, 0.25053367f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 3.19567990f, 2.45070267f, 1.78698075f, 1.32549286f, 1.01931262f, 0.803307f, 0.64427125f, 0.50118381f, 0.38853383f, 0.29807833f, 0.22545385f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 3.19567990f, 2.45070267f, 1.78698075f, 1.32549286f, 1.01931262f, 0.803307f, 0.64427125f, 0.52423614f, 0.41087446f, 0.32104823f, 0.25053367f, 0.19894916f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 3.19567990f, 2.45070267f, 1.84880662f, 1.41535246f, 1.12534678f, 0.89115214f, 0.72133851f, 0.59516323f, 0.4783645f, 0.38853383f, 0.32104823f, 0.25053367f, 0.19894916f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.86714602f, 3.19567990f, 2.45070267f, 1.84880662f, 1.41535246f, 1.12534678f, 0.89115214f, 0.72133851f, 0.59516323f, 0.50118381f, 0.41087446f, 0.34370604f, 0.29807833f, 0.25053367f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f } +}; + +const std::vector> GITS_NOISE_1_20 = { + { 14.61464119f, 0.803307f, 0.02916753f }, + { 14.61464119f, 1.56271636f, 0.52423614f, 0.02916753f }, + { 14.61464119f, 2.36326075f, 0.92192322f, 0.36617002f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.24153244f, 0.59516323f, 0.25053367f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.05039096f, 0.95350921f, 0.45573691f, 0.17026083f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.45070267f, 1.24153244f, 0.64427125f, 0.29807833f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.45070267f, 1.36964464f, 0.803307f, 0.45573691f, 0.25053367f, 0.09824532f, 0.02916753f }, + { 
14.61464119f, 5.85520077f, 2.84484982f, 1.61558151f, 0.95350921f, 0.59516323f, 0.36617002f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.84484982f, 1.67050016f, 1.08895338f, 0.74807048f, 0.50118381f, 0.32104823f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.95596409f, 1.84880662f, 1.24153244f, 0.83188516f, 0.59516323f, 0.41087446f, 0.27464288f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 3.07277966f, 1.98035145f, 1.36964464f, 0.95350921f, 0.69515091f, 0.50118381f, 0.36617002f, 0.25053367f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 6.77309084f, 3.46139455f, 2.36326075f, 1.56271636f, 1.08895338f, 0.803307f, 0.59516323f, 0.45573691f, 0.34370604f, 0.25053367f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 6.77309084f, 3.46139455f, 2.45070267f, 1.61558151f, 1.162866f, 0.86115354f, 0.64427125f, 0.50118381f, 0.38853383f, 0.29807833f, 0.22545385f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.65472794f, 3.07277966f, 2.12350607f, 1.51179266f, 1.08895338f, 0.83188516f, 0.64427125f, 0.50118381f, 0.38853383f, 0.29807833f, 0.22545385f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.65472794f, 3.07277966f, 2.12350607f, 1.51179266f, 1.08895338f, 0.83188516f, 0.64427125f, 0.50118381f, 0.41087446f, 0.32104823f, 0.25053367f, 0.19894916f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.65472794f, 3.07277966f, 2.12350607f, 1.51179266f, 1.08895338f, 0.83188516f, 0.64427125f, 0.50118381f, 0.41087446f, 0.34370604f, 0.27464288f, 0.22545385f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.65472794f, 3.07277966f, 2.19988537f, 1.61558151f, 1.20157266f, 0.92192322f, 0.72133851f, 0.57119018f, 0.45573691f, 0.36617002f, 0.29807833f, 0.25053367f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.65472794f, 
3.07277966f, 2.19988537f, 1.61558151f, 1.24153244f, 0.95350921f, 0.74807048f, 0.59516323f, 0.4783645f, 0.38853383f, 0.32104823f, 0.27464288f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 7.49001646f, 4.65472794f, 3.07277966f, 2.19988537f, 1.61558151f, 1.24153244f, 0.95350921f, 0.74807048f, 0.59516323f, 0.50118381f, 0.41087446f, 0.34370604f, 0.29807833f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f } +}; + +const std::vector> GITS_NOISE_1_25 = { + { 14.61464119f, 0.72133851f, 0.02916753f }, + { 14.61464119f, 1.56271636f, 0.50118381f, 0.02916753f }, + { 14.61464119f, 2.05039096f, 0.803307f, 0.32104823f, 0.02916753f }, + { 14.61464119f, 2.36326075f, 0.95350921f, 0.43325692f, 0.17026083f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.24153244f, 0.59516323f, 0.27464288f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 3.07277966f, 1.51179266f, 0.803307f, 0.43325692f, 0.22545385f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.36326075f, 1.24153244f, 0.72133851f, 0.41087446f, 0.22545385f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.45070267f, 1.36964464f, 0.83188516f, 0.52423614f, 0.34370604f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.84484982f, 1.61558151f, 0.98595673f, 0.64427125f, 0.43325692f, 0.27464288f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.84484982f, 1.67050016f, 1.08895338f, 0.74807048f, 0.52423614f, 0.36617002f, 0.25053367f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.84484982f, 1.72759056f, 1.162866f, 0.803307f, 0.59516323f, 0.45573691f, 0.34370604f, 0.25053367f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.95596409f, 1.84880662f, 1.24153244f, 0.86115354f, 0.64427125f, 0.4783645f, 0.36617002f, 0.27464288f, 0.19894916f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.95596409f, 
1.84880662f, 1.28281462f, 0.92192322f, 0.69515091f, 0.52423614f, 0.41087446f, 0.32104823f, 0.25053367f, 0.19894916f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.95596409f, 1.91321158f, 1.32549286f, 0.95350921f, 0.72133851f, 0.54755926f, 0.43325692f, 0.34370604f, 0.27464288f, 0.22545385f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.95596409f, 1.91321158f, 1.32549286f, 0.95350921f, 0.72133851f, 0.57119018f, 0.45573691f, 0.36617002f, 0.29807833f, 0.25053367f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.95596409f, 1.91321158f, 1.32549286f, 0.95350921f, 0.74807048f, 0.59516323f, 0.4783645f, 0.38853383f, 0.32104823f, 0.27464288f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 3.07277966f, 2.05039096f, 1.41535246f, 1.05362725f, 0.803307f, 0.61951244f, 0.50118381f, 0.41087446f, 0.34370604f, 0.29807833f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 3.07277966f, 2.05039096f, 1.41535246f, 1.05362725f, 0.803307f, 0.64427125f, 0.52423614f, 0.43325692f, 0.36617002f, 0.32104823f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 3.07277966f, 2.05039096f, 1.46270394f, 1.08895338f, 0.83188516f, 0.66947293f, 0.54755926f, 0.45573691f, 0.38853383f, 0.34370604f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f } +}; + +const std::vector> GITS_NOISE_1_30 = { + { 14.61464119f, 0.72133851f, 0.02916753f }, + { 14.61464119f, 1.24153244f, 0.43325692f, 0.02916753f }, + { 14.61464119f, 1.56271636f, 0.59516323f, 0.22545385f, 0.02916753f }, + { 14.61464119f, 1.84880662f, 0.803307f, 0.36617002f, 0.13792117f, 0.02916753f }, + { 14.61464119f, 2.36326075f, 1.01931262f, 0.52423614f, 
0.25053367f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.36964464f, 0.74807048f, 0.41087446f, 0.22545385f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 3.07277966f, 1.56271636f, 0.89115214f, 0.54755926f, 0.34370604f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 3.07277966f, 1.61558151f, 0.95350921f, 0.61951244f, 0.41087446f, 0.27464288f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.45070267f, 1.36964464f, 0.83188516f, 0.54755926f, 0.36617002f, 0.25053367f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.45070267f, 1.41535246f, 0.92192322f, 0.64427125f, 0.45573691f, 0.34370604f, 0.25053367f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.6383388f, 1.56271636f, 1.01931262f, 0.72133851f, 0.50118381f, 0.36617002f, 0.27464288f, 0.19894916f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.84484982f, 1.61558151f, 1.05362725f, 0.74807048f, 0.54755926f, 0.41087446f, 0.32104823f, 0.25053367f, 0.19894916f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.84484982f, 1.61558151f, 1.08895338f, 0.77538133f, 0.57119018f, 0.43325692f, 0.34370604f, 0.27464288f, 0.22545385f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.84484982f, 1.61558151f, 1.08895338f, 0.803307f, 0.59516323f, 0.45573691f, 0.36617002f, 0.29807833f, 0.25053367f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.84484982f, 1.61558151f, 1.08895338f, 0.803307f, 0.59516323f, 0.4783645f, 0.38853383f, 0.32104823f, 0.27464288f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.84484982f, 1.72759056f, 1.162866f, 0.83188516f, 0.64427125f, 0.50118381f, 0.41087446f, 0.34370604f, 0.29807833f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 
2.84484982f, 1.72759056f, 1.162866f, 0.83188516f, 0.64427125f, 0.52423614f, 0.43325692f, 0.36617002f, 0.32104823f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.84484982f, 1.78698075f, 1.24153244f, 0.92192322f, 0.72133851f, 0.57119018f, 0.45573691f, 0.38853383f, 0.34370604f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.84484982f, 1.78698075f, 1.24153244f, 0.92192322f, 0.72133851f, 0.57119018f, 0.4783645f, 0.41087446f, 0.36617002f, 0.32104823f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f } +}; + +const std::vector> GITS_NOISE_1_35 = { + { 14.61464119f, 0.69515091f, 0.02916753f }, + { 14.61464119f, 0.95350921f, 0.34370604f, 0.02916753f }, + { 14.61464119f, 1.56271636f, 0.57119018f, 0.19894916f, 0.02916753f }, + { 14.61464119f, 1.61558151f, 0.69515091f, 0.29807833f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 1.84880662f, 0.83188516f, 0.43325692f, 0.22545385f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.45070267f, 1.162866f, 0.64427125f, 0.36617002f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.36964464f, 0.803307f, 0.50118381f, 0.32104823f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.41535246f, 0.83188516f, 0.54755926f, 0.36617002f, 0.25053367f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.56271636f, 0.95350921f, 0.64427125f, 0.45573691f, 0.32104823f, 0.22545385f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.56271636f, 0.95350921f, 0.64427125f, 0.45573691f, 0.34370604f, 0.25053367f, 0.19894916f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 3.07277966f, 1.61558151f, 1.01931262f, 0.72133851f, 0.52423614f, 0.38853383f, 0.29807833f, 0.22545385f, 0.17026083f, 
0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 3.07277966f, 1.61558151f, 1.01931262f, 0.72133851f, 0.52423614f, 0.41087446f, 0.32104823f, 0.25053367f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 3.07277966f, 1.61558151f, 1.05362725f, 0.74807048f, 0.54755926f, 0.43325692f, 0.34370604f, 0.27464288f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 3.07277966f, 1.72759056f, 1.12534678f, 0.803307f, 0.59516323f, 0.45573691f, 0.36617002f, 0.29807833f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 3.07277966f, 1.72759056f, 1.12534678f, 0.803307f, 0.59516323f, 0.4783645f, 0.38853383f, 0.32104823f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.45070267f, 1.51179266f, 1.01931262f, 0.74807048f, 0.57119018f, 0.45573691f, 0.36617002f, 0.32104823f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.6383388f, 1.61558151f, 1.08895338f, 0.803307f, 0.61951244f, 0.50118381f, 0.41087446f, 0.34370604f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.6383388f, 1.61558151f, 1.08895338f, 0.803307f, 0.64427125f, 0.52423614f, 0.43325692f, 0.36617002f, 0.32104823f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 5.85520077f, 2.6383388f, 1.61558151f, 1.08895338f, 0.803307f, 0.64427125f, 0.52423614f, 0.45573691f, 0.38853383f, 0.34370604f, 0.32104823f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f } +}; + +const std::vector> GITS_NOISE_1_40 = { + { 14.61464119f, 0.59516323f, 0.02916753f }, + { 
14.61464119f, 0.95350921f, 0.34370604f, 0.02916753f }, + { 14.61464119f, 1.08895338f, 0.43325692f, 0.13792117f, 0.02916753f }, + { 14.61464119f, 1.56271636f, 0.64427125f, 0.27464288f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 1.61558151f, 0.803307f, 0.43325692f, 0.22545385f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.05039096f, 0.95350921f, 0.54755926f, 0.34370604f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.45070267f, 1.24153244f, 0.72133851f, 0.43325692f, 0.27464288f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.45070267f, 1.24153244f, 0.74807048f, 0.50118381f, 0.34370604f, 0.25053367f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.45070267f, 1.28281462f, 0.803307f, 0.52423614f, 0.36617002f, 0.27464288f, 0.19894916f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.45070267f, 1.28281462f, 0.803307f, 0.54755926f, 0.38853383f, 0.29807833f, 0.22545385f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.41535246f, 0.86115354f, 0.59516323f, 0.43325692f, 0.32104823f, 0.25053367f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.51179266f, 0.95350921f, 0.64427125f, 0.45573691f, 0.34370604f, 0.27464288f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.51179266f, 0.95350921f, 0.64427125f, 0.4783645f, 0.36617002f, 0.29807833f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.56271636f, 0.98595673f, 0.69515091f, 0.52423614f, 0.41087446f, 0.34370604f, 0.29807833f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.56271636f, 1.01931262f, 0.72133851f, 0.54755926f, 0.43325692f, 0.36617002f, 0.32104823f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + 
{ 14.61464119f, 2.84484982f, 1.61558151f, 1.05362725f, 0.74807048f, 0.57119018f, 0.45573691f, 0.38853383f, 0.34370604f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.61558151f, 1.08895338f, 0.803307f, 0.61951244f, 0.50118381f, 0.41087446f, 0.36617002f, 0.32104823f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.61558151f, 1.08895338f, 0.803307f, 0.61951244f, 0.50118381f, 0.43325692f, 0.38853383f, 0.34370604f, 0.32104823f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.61558151f, 1.08895338f, 0.803307f, 0.64427125f, 0.52423614f, 0.45573691f, 0.41087446f, 0.36617002f, 0.34370604f, 0.32104823f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f } +}; + +const std::vector> GITS_NOISE_1_45 = { + { 14.61464119f, 0.59516323f, 0.02916753f }, + { 14.61464119f, 0.803307f, 0.25053367f, 0.02916753f }, + { 14.61464119f, 0.95350921f, 0.34370604f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 1.24153244f, 0.54755926f, 0.25053367f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 1.56271636f, 0.72133851f, 0.36617002f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 1.61558151f, 0.803307f, 0.45573691f, 0.27464288f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 1.91321158f, 0.95350921f, 0.57119018f, 0.36617002f, 0.25053367f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.19988537f, 1.08895338f, 0.64427125f, 0.41087446f, 0.27464288f, 0.19894916f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.45070267f, 1.24153244f, 0.74807048f, 0.50118381f, 0.34370604f, 0.25053367f, 0.19894916f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.45070267f, 
1.24153244f, 0.74807048f, 0.50118381f, 0.36617002f, 0.27464288f, 0.22545385f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.45070267f, 1.28281462f, 0.803307f, 0.54755926f, 0.41087446f, 0.32104823f, 0.25053367f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.45070267f, 1.28281462f, 0.803307f, 0.57119018f, 0.43325692f, 0.34370604f, 0.27464288f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.45070267f, 1.28281462f, 0.83188516f, 0.59516323f, 0.45573691f, 0.36617002f, 0.29807833f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.45070267f, 1.28281462f, 0.83188516f, 0.59516323f, 0.45573691f, 0.36617002f, 0.32104823f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.51179266f, 0.95350921f, 0.69515091f, 0.52423614f, 0.41087446f, 0.34370604f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.51179266f, 0.95350921f, 0.69515091f, 0.52423614f, 0.43325692f, 0.36617002f, 0.32104823f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.56271636f, 0.98595673f, 0.72133851f, 0.54755926f, 0.45573691f, 0.38853383f, 0.34370604f, 0.32104823f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.56271636f, 1.01931262f, 0.74807048f, 0.57119018f, 0.4783645f, 0.41087446f, 0.36617002f, 0.34370604f, 0.32104823f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.84484982f, 1.56271636f, 1.01931262f, 0.74807048f, 0.59516323f, 0.50118381f, 
0.43325692f, 0.38853383f, 0.36617002f, 0.34370604f, 0.32104823f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f } +}; + +const std::vector> GITS_NOISE_1_50 = { + { 14.61464119f, 0.54755926f, 0.02916753f }, + { 14.61464119f, 0.803307f, 0.25053367f, 0.02916753f }, + { 14.61464119f, 0.86115354f, 0.32104823f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 1.24153244f, 0.54755926f, 0.25053367f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 1.56271636f, 0.72133851f, 0.36617002f, 0.19894916f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 1.61558151f, 0.803307f, 0.45573691f, 0.27464288f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 1.61558151f, 0.83188516f, 0.52423614f, 0.34370604f, 0.25053367f, 0.17026083f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 1.84880662f, 0.95350921f, 0.59516323f, 0.38853383f, 0.27464288f, 0.19894916f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 1.84880662f, 0.95350921f, 0.59516323f, 0.41087446f, 0.29807833f, 0.22545385f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 1.84880662f, 0.95350921f, 0.61951244f, 0.43325692f, 0.32104823f, 0.25053367f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.19988537f, 1.12534678f, 0.72133851f, 0.50118381f, 0.36617002f, 0.27464288f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.19988537f, 1.12534678f, 0.72133851f, 0.50118381f, 0.36617002f, 0.29807833f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.36326075f, 1.24153244f, 0.803307f, 0.57119018f, 0.43325692f, 0.34370604f, 0.29807833f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.36326075f, 1.24153244f, 0.803307f, 0.57119018f, 0.43325692f, 0.34370604f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 
0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.36326075f, 1.24153244f, 0.803307f, 0.59516323f, 0.45573691f, 0.36617002f, 0.32104823f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.36326075f, 1.24153244f, 0.803307f, 0.59516323f, 0.45573691f, 0.38853383f, 0.34370604f, 0.32104823f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.45070267f, 1.32549286f, 0.86115354f, 0.64427125f, 0.50118381f, 0.41087446f, 0.36617002f, 0.34370604f, 0.32104823f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.45070267f, 1.36964464f, 0.92192322f, 0.69515091f, 0.54755926f, 0.45573691f, 0.41087446f, 0.36617002f, 0.34370604f, 0.32104823f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f }, + { 14.61464119f, 2.45070267f, 1.41535246f, 0.95350921f, 0.72133851f, 0.57119018f, 0.4783645f, 0.43325692f, 0.38853383f, 0.36617002f, 0.34370604f, 0.32104823f, 0.29807833f, 0.27464288f, 0.25053367f, 0.22545385f, 0.19894916f, 0.17026083f, 0.13792117f, 0.09824532f, 0.02916753f } +}; + +const std::vector>*> GITS_NOISE = { + &GITS_NOISE_0_80, + &GITS_NOISE_0_85, + &GITS_NOISE_0_90, + &GITS_NOISE_0_95, + &GITS_NOISE_1_00, + &GITS_NOISE_1_05, + &GITS_NOISE_1_10, + &GITS_NOISE_1_15, + &GITS_NOISE_1_20, + &GITS_NOISE_1_25, + &GITS_NOISE_1_30, + &GITS_NOISE_1_35, + &GITS_NOISE_1_40, + &GITS_NOISE_1_45, + &GITS_NOISE_1_50 +}; + +#endif // GITS_NOISE_INL diff --git a/lora.hpp b/lora.hpp new file mode 100644 index 000000000..35f5aacd1 --- /dev/null +++ b/lora.hpp @@ -0,0 +1,846 @@ +#ifndef __LORA_HPP__ +#define __LORA_HPP__ + +#include "ggml_extend.hpp" + +#define LORA_GRAPH_BASE_SIZE 10240 + +struct LoraModel : public GGMLRunner { + enum lora_t { + REGULAR 
= 0, + DIFFUSERS = 1, + DIFFUSERS_2 = 2, + DIFFUSERS_3 = 3, + TRANSFORMERS = 4, + LORA_TYPE_COUNT + }; + + const std::string lora_ups[LORA_TYPE_COUNT] = { + ".lora_up", + "_lora.up", + ".lora_B", + ".lora.up", + ".lora_linear_layer.up", + }; + + const std::string lora_downs[LORA_TYPE_COUNT] = { + ".lora_down", + "_lora.down", + ".lora_A", + ".lora.down", + ".lora_linear_layer.down", + }; + + const std::string lora_pre[LORA_TYPE_COUNT] = { + "lora.", + "", + "", + "", + "", + }; + + const std::map alt_names = { + // mmdit + {"final_layer.adaLN_modulation.1", "norm_out.linear"}, + {"pos_embed", "pos_embed.proj"}, + {"final_layer.linear", "proj_out"}, + {"y_embedder.mlp.0", "time_text_embed.text_embedder.linear_1"}, + {"y_embedder.mlp.2", "time_text_embed.text_embedder.linear_2"}, + {"t_embedder.mlp.0", "time_text_embed.timestep_embedder.linear_1"}, + {"t_embedder.mlp.2", "time_text_embed.timestep_embedder.linear_2"}, + {"x_block.mlp.fc1", "ff.net.0.proj"}, + {"x_block.mlp.fc2", "ff.net.2"}, + {"context_block.mlp.fc1", "ff_context.net.0.proj"}, + {"context_block.mlp.fc2", "ff_context.net.2"}, + {"x_block.adaLN_modulation.1", "norm1.linear"}, + {"context_block.adaLN_modulation.1", "norm1_context.linear"}, + {"context_block.attn.proj", "attn.to_add_out"}, + {"x_block.attn.proj", "attn.to_out.0"}, + {"x_block.attn2.proj", "attn2.to_out.0"}, + // flux + // singlestream + {"linear2", "proj_out"}, + {"modulation.lin", "norm.linear"}, + // doublestream + {"txt_attn.proj", "attn.to_add_out"}, + {"img_attn.proj", "attn.to_out.0"}, + {"txt_mlp.0", "ff_context.net.0.proj"}, + {"txt_mlp.2", "ff_context.net.2"}, + {"img_mlp.0", "ff.net.0.proj"}, + {"img_mlp.2", "ff.net.2"}, + {"txt_mod.lin", "norm1_context.linear"}, + {"img_mod.lin", "norm1.linear"}, + }; + + const std::map qkv_prefixes = { + // mmdit + {"context_block.attn.qkv", "attn.add_"}, // suffix "_proj" + {"x_block.attn.qkv", "attn.to_"}, + {"x_block.attn2.qkv", "attn2.to_"}, + // flux + // doublestream + {"txt_attn.qkv", 
"attn.add_"}, // suffix "_proj" + {"img_attn.qkv", "attn.to_"}, + }; + const std::map qkvm_prefixes = { + // flux + // singlestream + {"linear1", ""}, + }; + + const std::string* type_fingerprints = lora_ups; + + float multiplier = 1.0f; + std::map lora_tensors; + std::string file_path; + ModelLoader model_loader; + bool load_failed = false; + bool applied = false; + std::vector zero_index_vec = {0}; + ggml_tensor* zero_index = NULL; + enum lora_t type = REGULAR; + + LoraModel(ggml_backend_t backend, + const std::string& file_path = "", + const std::string prefix = "") + : file_path(file_path), GGMLRunner(backend) { + if (!model_loader.init_from_file(file_path, prefix)) { + load_failed = true; + } + } + + std::string get_desc() { + return "lora"; + } + + bool load_from_file(bool filter_tensor = false) { + LOG_INFO("loading LoRA from '%s'", file_path.c_str()); + + if (load_failed) { + LOG_ERROR("init lora model loader from file failed: '%s'", file_path.c_str()); + return false; + } + + bool dry_run = true; + auto on_new_tensor_cb = [&](const TensorStorage& tensor_storage, ggml_tensor** dst_tensor) -> bool { + const std::string& name = tensor_storage.name; + + if (filter_tensor && !contains(name, "lora")) { + // LOG_INFO("skipping LoRA tesnor '%s'", name.c_str()); + return true; + } + // LOG_INFO("%s", name.c_str()); + for (int i = 0; i < LORA_TYPE_COUNT; i++) { + if (name.find(type_fingerprints[i]) != std::string::npos) { + type = (lora_t)i; + break; + } + } + + if (dry_run) { + struct ggml_tensor* real = ggml_new_tensor(params_ctx, + tensor_storage.type, + tensor_storage.n_dims, + tensor_storage.ne); + lora_tensors[name] = real; + } else { + auto real = lora_tensors[name]; + *dst_tensor = real; + } + + return true; + }; + + model_loader.load_tensors(on_new_tensor_cb, backend); + alloc_params_buffer(); + // exit(0); + dry_run = false; + model_loader.load_tensors(on_new_tensor_cb, backend); + + LOG_DEBUG("lora type: \"%s\"/\"%s\"", lora_downs[type].c_str(), 
lora_ups[type].c_str()); + + LOG_DEBUG("finished loaded lora"); + return true; + } + + ggml_tensor* to_f32(ggml_context* ctx, ggml_tensor* a) { + auto out = ggml_reshape_1d(ctx, a, ggml_nelements(a)); + out = ggml_get_rows(ctx, out, zero_index); + out = ggml_reshape(ctx, out, a); + return out; + } + + std::vector to_lora_keys(std::string blk_name, SDVersion version) { + std::vector keys; + // if (!sd_version_is_sd3(version) || blk_name != "model.diffusion_model.pos_embed") { + size_t k_pos = blk_name.find(".weight"); + if (k_pos == std::string::npos) { + return keys; + } + blk_name = blk_name.substr(0, k_pos); + // } + keys.push_back(blk_name); + keys.push_back("lora." + blk_name); + if (sd_version_is_dit(version)) { + if (blk_name.find("model.diffusion_model") != std::string::npos) { + blk_name.replace(blk_name.find("model.diffusion_model"), sizeof("model.diffusion_model") - 1, "transformer"); + } + + if (blk_name.find(".single_blocks") != std::string::npos) { + blk_name.replace(blk_name.find(".single_blocks"), sizeof(".single_blocks") - 1, ".single_transformer_blocks"); + } + if (blk_name.find(".double_blocks") != std::string::npos) { + blk_name.replace(blk_name.find(".double_blocks"), sizeof(".double_blocks") - 1, ".transformer_blocks"); + } + + if (blk_name.find(".joint_blocks") != std::string::npos) { + blk_name.replace(blk_name.find(".joint_blocks"), sizeof(".joint_blocks") - 1, ".transformer_blocks"); + } + + if (blk_name.find("text_encoders.clip_l") != std::string::npos) { + blk_name.replace(blk_name.find("text_encoders.clip_l"), sizeof("text_encoders.clip_l") - 1, "cond_stage_model"); + } + + for (const auto& item : alt_names) { + size_t match = blk_name.find(item.first); + if (match != std::string::npos) { + blk_name = blk_name.substr(0, match) + item.second; + } + } + for (const auto& prefix : qkv_prefixes) { + size_t match = blk_name.find(prefix.first); + if (match != std::string::npos) { + std::string split_blk = "SPLIT|" + blk_name.substr(0, match) + 
prefix.second; + keys.push_back(split_blk); + } + } + for (const auto& prefix : qkvm_prefixes) { + size_t match = blk_name.find(prefix.first); + if (match != std::string::npos) { + std::string split_blk = "SPLIT_L|" + blk_name.substr(0, match) + prefix.second; + keys.push_back(split_blk); + } + } + keys.push_back(blk_name); + } + + std::vector ret; + for (std::string& key : keys) { + ret.push_back(key); + replace_all_chars(key, '.', '_'); + // fix for some sdxl lora, like lcm-lora-xl + if (key == "model_diffusion_model_output_blocks_2_2_conv") { + ret.push_back("model_diffusion_model_output_blocks_2_1_conv"); + } + ret.push_back(key); + } + return ret; + } + + struct ggml_cgraph* build_lora_graph(std::map model_tensors, SDVersion version) { + size_t lora_graph_size = LORA_GRAPH_BASE_SIZE + lora_tensors.size() * 10; + struct ggml_cgraph* gf = ggml_new_graph_custom(compute_ctx, lora_graph_size, false); + + zero_index = ggml_new_tensor_1d(compute_ctx, GGML_TYPE_I32, 1); + set_backend_tensor_data(zero_index, zero_index_vec.data()); + ggml_build_forward_expand(gf, zero_index); + + std::set applied_lora_tensors; + for (auto it : model_tensors) { + std::string k_tensor = it.first; + struct ggml_tensor* weight = model_tensors[it.first]; + + std::vector keys = to_lora_keys(k_tensor, version); + if (keys.size() == 0) + continue; + + for (auto& key : keys) { + bool is_qkv_split = starts_with(key, "SPLIT|"); + if (is_qkv_split) { + key = key.substr(sizeof("SPLIT|") - 1); + } + bool is_qkvm_split = starts_with(key, "SPLIT_L|"); + if (is_qkvm_split) { + key = key.substr(sizeof("SPLIT_L|") - 1); + } + struct ggml_tensor* updown = NULL; + float scale_value = 1.0f; + std::string fk = lora_pre[type] + key; + if (lora_tensors.find(fk + ".hada_w1_a") != lora_tensors.end()) { + // LoHa mode + + // TODO: split qkv convention for LoHas (is it ever used?) 
+ if (is_qkv_split || is_qkvm_split) { + LOG_ERROR("Split qkv isn't supported for LoHa models."); + break; + } + std::string alpha_name = ""; + + ggml_tensor* hada_1_mid = NULL; // tau for tucker decomposition + ggml_tensor* hada_1_up = NULL; + ggml_tensor* hada_1_down = NULL; + + ggml_tensor* hada_2_mid = NULL; // tau for tucker decomposition + ggml_tensor* hada_2_up = NULL; + ggml_tensor* hada_2_down = NULL; + + std::string hada_1_mid_name = ""; + std::string hada_1_down_name = ""; + std::string hada_1_up_name = ""; + + std::string hada_2_mid_name = ""; + std::string hada_2_down_name = ""; + std::string hada_2_up_name = ""; + + hada_1_down_name = fk + ".hada_w1_b"; + hada_1_up_name = fk + ".hada_w1_a"; + hada_1_mid_name = fk + ".hada_t1"; + if (lora_tensors.find(hada_1_down_name) != lora_tensors.end()) { + hada_1_down = to_f32(compute_ctx, lora_tensors[hada_1_down_name]); + } + if (lora_tensors.find(hada_1_up_name) != lora_tensors.end()) { + hada_1_up = to_f32(compute_ctx, lora_tensors[hada_1_up_name]); + } + if (lora_tensors.find(hada_1_mid_name) != lora_tensors.end()) { + hada_1_mid = to_f32(compute_ctx, lora_tensors[hada_1_mid_name]); + applied_lora_tensors.insert(hada_1_mid_name); + hada_1_up = ggml_cont(compute_ctx, ggml_transpose(compute_ctx, hada_1_up)); + } + + hada_2_down_name = fk + ".hada_w2_b"; + hada_2_up_name = fk + ".hada_w2_a"; + hada_2_mid_name = fk + ".hada_t2"; + if (lora_tensors.find(hada_2_down_name) != lora_tensors.end()) { + hada_2_down = to_f32(compute_ctx, lora_tensors[hada_2_down_name]); + } + if (lora_tensors.find(hada_2_up_name) != lora_tensors.end()) { + hada_2_up = to_f32(compute_ctx, lora_tensors[hada_2_up_name]); + } + if (lora_tensors.find(hada_2_mid_name) != lora_tensors.end()) { + hada_2_mid = to_f32(compute_ctx, lora_tensors[hada_2_mid_name]); + applied_lora_tensors.insert(hada_2_mid_name); + hada_2_up = ggml_cont(compute_ctx, ggml_transpose(compute_ctx, hada_2_up)); + } + + alpha_name = fk + ".alpha"; + + 
applied_lora_tensors.insert(hada_1_down_name); + applied_lora_tensors.insert(hada_1_up_name); + applied_lora_tensors.insert(hada_2_down_name); + applied_lora_tensors.insert(hada_2_up_name); + + applied_lora_tensors.insert(alpha_name); + if (hada_1_up == NULL || hada_1_down == NULL || hada_2_up == NULL || hada_2_down == NULL) { + continue; + } + + struct ggml_tensor* updown_1 = ggml_merge_lora(compute_ctx, hada_1_down, hada_1_up, hada_1_mid); + struct ggml_tensor* updown_2 = ggml_merge_lora(compute_ctx, hada_2_down, hada_2_up, hada_2_mid); + updown = ggml_mul_inplace(compute_ctx, updown_1, updown_2); + + // calc_scale + // TODO: .dora_scale? + int64_t rank = hada_1_down->ne[ggml_n_dims(hada_1_down) - 1]; + if (lora_tensors.find(alpha_name) != lora_tensors.end()) { + float alpha = ggml_backend_tensor_get_f32(lora_tensors[alpha_name]); + scale_value = alpha / rank; + } + } else if (lora_tensors.find(fk + ".lokr_w1") != lora_tensors.end() || lora_tensors.find(fk + ".lokr_w1_a") != lora_tensors.end()) { + // LoKr mode + + // TODO: split qkv convention for LoKrs (is it ever used?) 
+ if (is_qkv_split || is_qkvm_split) { + LOG_ERROR("Split qkv isn't supported for LoKr models."); + break; + } + + std::string alpha_name = fk + ".alpha"; + + ggml_tensor* lokr_w1 = NULL; + ggml_tensor* lokr_w2 = NULL; + + std::string lokr_w1_name = ""; + std::string lokr_w2_name = ""; + + lokr_w1_name = fk + ".lokr_w1"; + lokr_w2_name = fk + ".lokr_w2"; + + if (lora_tensors.find(lokr_w1_name) != lora_tensors.end()) { + lokr_w1 = to_f32(compute_ctx, lora_tensors[lokr_w1_name]); + applied_lora_tensors.insert(lokr_w1_name); + } else { + ggml_tensor* down = NULL; + ggml_tensor* up = NULL; + std::string down_name = lokr_w1_name + "_b"; + std::string up_name = lokr_w1_name + "_a"; + if (lora_tensors.find(down_name) != lora_tensors.end()) { + // w1 should not be low rank normally, sometimes w1 and w2 are swapped + down = to_f32(compute_ctx, lora_tensors[down_name]); + applied_lora_tensors.insert(down_name); + + int64_t rank = down->ne[ggml_n_dims(down) - 1]; + if (lora_tensors.find(alpha_name) != lora_tensors.end()) { + float alpha = ggml_backend_tensor_get_f32(lora_tensors[alpha_name]); + scale_value = alpha / rank; + } + } + if (lora_tensors.find(up_name) != lora_tensors.end()) { + up = to_f32(compute_ctx, lora_tensors[up_name]); + applied_lora_tensors.insert(up_name); + } + lokr_w1 = ggml_merge_lora(compute_ctx, down, up); + } + if (lora_tensors.find(lokr_w2_name) != lora_tensors.end()) { + lokr_w2 = to_f32(compute_ctx, lora_tensors[lokr_w2_name]); + applied_lora_tensors.insert(lokr_w2_name); + } else { + ggml_tensor* down = NULL; + ggml_tensor* up = NULL; + std::string down_name = lokr_w2_name + "_b"; + std::string up_name = lokr_w2_name + "_a"; + if (lora_tensors.find(down_name) != lora_tensors.end()) { + down = to_f32(compute_ctx, lora_tensors[down_name]); + applied_lora_tensors.insert(down_name); + + int64_t rank = down->ne[ggml_n_dims(down) - 1]; + if (lora_tensors.find(alpha_name) != lora_tensors.end()) { + float alpha = 
ggml_backend_tensor_get_f32(lora_tensors[alpha_name]); + scale_value = alpha / rank; + } + } + if (lora_tensors.find(up_name) != lora_tensors.end()) { + up = to_f32(compute_ctx, lora_tensors[up_name]); + applied_lora_tensors.insert(up_name); + } + lokr_w2 = ggml_merge_lora(compute_ctx, down, up); + } + + // Technically it might be unused, but I believe it's the expected behavior + applied_lora_tensors.insert(alpha_name); + + updown = ggml_kronecker(compute_ctx, lokr_w1, lokr_w2); + + } else { + // LoRA mode + ggml_tensor* lora_mid = NULL; // tau for tucker decomposition + ggml_tensor* lora_up = NULL; + ggml_tensor* lora_down = NULL; + + std::string alpha_name = ""; + std::string scale_name = ""; + std::string split_q_scale_name = ""; + std::string lora_mid_name = ""; + std::string lora_down_name = ""; + std::string lora_up_name = ""; + + if (is_qkv_split) { + std::string suffix = ""; + auto split_q_d_name = fk + "q" + suffix + lora_downs[type] + ".weight"; + + if (lora_tensors.find(split_q_d_name) == lora_tensors.end()) { + suffix = "_proj"; + split_q_d_name = fk + "q" + suffix + lora_downs[type] + ".weight"; + } + if (lora_tensors.find(split_q_d_name) != lora_tensors.end()) { + // print_ggml_tensor(it.second, true); //[3072, 21504, 1, 1] + // find qkv and mlp up parts in LoRA model + auto split_k_d_name = fk + "k" + suffix + lora_downs[type] + ".weight"; + auto split_v_d_name = fk + "v" + suffix + lora_downs[type] + ".weight"; + + auto split_q_u_name = fk + "q" + suffix + lora_ups[type] + ".weight"; + auto split_k_u_name = fk + "k" + suffix + lora_ups[type] + ".weight"; + auto split_v_u_name = fk + "v" + suffix + lora_ups[type] + ".weight"; + + auto split_q_scale_name = fk + "q" + suffix + ".scale"; + auto split_k_scale_name = fk + "k" + suffix + ".scale"; + auto split_v_scale_name = fk + "v" + suffix + ".scale"; + + auto split_q_alpha_name = fk + "q" + suffix + ".alpha"; + auto split_k_alpha_name = fk + "k" + suffix + ".alpha"; + auto split_v_alpha_name = fk + 
"v" + suffix + ".alpha"; + + ggml_tensor* lora_q_down = NULL; + ggml_tensor* lora_q_up = NULL; + ggml_tensor* lora_k_down = NULL; + ggml_tensor* lora_k_up = NULL; + ggml_tensor* lora_v_down = NULL; + ggml_tensor* lora_v_up = NULL; + + lora_q_down = to_f32(compute_ctx, lora_tensors[split_q_d_name]); + + if (lora_tensors.find(split_q_u_name) != lora_tensors.end()) { + lora_q_up = to_f32(compute_ctx, lora_tensors[split_q_u_name]); + } + + if (lora_tensors.find(split_k_d_name) != lora_tensors.end()) { + lora_k_down = to_f32(compute_ctx, lora_tensors[split_k_d_name]); + } + + if (lora_tensors.find(split_k_u_name) != lora_tensors.end()) { + lora_k_up = to_f32(compute_ctx, lora_tensors[split_k_u_name]); + } + + if (lora_tensors.find(split_v_d_name) != lora_tensors.end()) { + lora_v_down = to_f32(compute_ctx, lora_tensors[split_v_d_name]); + } + + if (lora_tensors.find(split_v_u_name) != lora_tensors.end()) { + lora_v_up = to_f32(compute_ctx, lora_tensors[split_v_u_name]); + } + + float q_rank = lora_q_up->ne[0]; + float k_rank = lora_k_up->ne[0]; + float v_rank = lora_v_up->ne[0]; + + float lora_q_scale = 1; + float lora_k_scale = 1; + float lora_v_scale = 1; + + if (lora_tensors.find(split_q_scale_name) != lora_tensors.end()) { + lora_q_scale = ggml_backend_tensor_get_f32(lora_tensors[split_q_scale_name]); + applied_lora_tensors.insert(split_q_scale_name); + } + if (lora_tensors.find(split_k_scale_name) != lora_tensors.end()) { + lora_k_scale = ggml_backend_tensor_get_f32(lora_tensors[split_k_scale_name]); + applied_lora_tensors.insert(split_k_scale_name); + } + if (lora_tensors.find(split_v_scale_name) != lora_tensors.end()) { + lora_v_scale = ggml_backend_tensor_get_f32(lora_tensors[split_v_scale_name]); + applied_lora_tensors.insert(split_v_scale_name); + } + + if (lora_tensors.find(split_q_alpha_name) != lora_tensors.end()) { + float lora_q_alpha = ggml_backend_tensor_get_f32(lora_tensors[split_q_alpha_name]); + applied_lora_tensors.insert(split_q_alpha_name); + 
lora_q_scale = lora_q_alpha / q_rank; + } + if (lora_tensors.find(split_k_alpha_name) != lora_tensors.end()) { + float lora_k_alpha = ggml_backend_tensor_get_f32(lora_tensors[split_k_alpha_name]); + applied_lora_tensors.insert(split_k_alpha_name); + lora_k_scale = lora_k_alpha / k_rank; + } + if (lora_tensors.find(split_v_alpha_name) != lora_tensors.end()) { + float lora_v_alpha = ggml_backend_tensor_get_f32(lora_tensors[split_v_alpha_name]); + applied_lora_tensors.insert(split_v_alpha_name); + lora_v_scale = lora_v_alpha / v_rank; + } + + ggml_scale_inplace(compute_ctx, lora_q_down, lora_q_scale); + ggml_scale_inplace(compute_ctx, lora_k_down, lora_k_scale); + ggml_scale_inplace(compute_ctx, lora_v_down, lora_v_scale); + + // print_ggml_tensor(lora_q_down, true); //[3072, R, 1, 1] + // print_ggml_tensor(lora_k_down, true); //[3072, R, 1, 1] + // print_ggml_tensor(lora_v_down, true); //[3072, R, 1, 1] + // print_ggml_tensor(lora_q_up, true); //[R, 3072, 1, 1] + // print_ggml_tensor(lora_k_up, true); //[R, 3072, 1, 1] + // print_ggml_tensor(lora_v_up, true); //[R, 3072, 1, 1] + + // these need to be stitched together this way: + // |q_up,0 ,0 | + // |0 ,k_up,0 | + // |0 ,0 ,v_up| + // (q_down,k_down,v_down) . 
(q ,k ,v) + + // up_concat will be [9216, R*3, 1, 1] + // down_concat will be [R*3, 3072, 1, 1] + ggml_tensor* lora_down_concat = ggml_concat(compute_ctx, ggml_concat(compute_ctx, lora_q_down, lora_k_down, 1), lora_v_down, 1); + + ggml_tensor* z = ggml_dup_tensor(compute_ctx, lora_q_up); + ggml_scale(compute_ctx, z, 0); + ggml_tensor* zz = ggml_concat(compute_ctx, z, z, 1); + + ggml_tensor* q_up = ggml_concat(compute_ctx, lora_q_up, zz, 1); + ggml_tensor* k_up = ggml_concat(compute_ctx, ggml_concat(compute_ctx, z, lora_k_up, 1), z, 1); + ggml_tensor* v_up = ggml_concat(compute_ctx, zz, lora_v_up, 1); + // print_ggml_tensor(q_up, true); //[R, 9216, 1, 1] + // print_ggml_tensor(k_up, true); //[R, 9216, 1, 1] + // print_ggml_tensor(v_up, true); //[R, 9216, 1, 1] + ggml_tensor* lora_up_concat = ggml_concat(compute_ctx, ggml_concat(compute_ctx, q_up, k_up, 0), v_up, 0); + // print_ggml_tensor(lora_up_concat, true); //[R*3, 9216, 1, 1] + + lora_down = ggml_cont(compute_ctx, lora_down_concat); + lora_up = ggml_cont(compute_ctx, lora_up_concat); + + applied_lora_tensors.insert(split_q_u_name); + applied_lora_tensors.insert(split_k_u_name); + applied_lora_tensors.insert(split_v_u_name); + + applied_lora_tensors.insert(split_q_d_name); + applied_lora_tensors.insert(split_k_d_name); + applied_lora_tensors.insert(split_v_d_name); + } + } else if (is_qkvm_split) { + auto split_q_d_name = fk + "attn.to_q" + lora_downs[type] + ".weight"; + if (lora_tensors.find(split_q_d_name) != lora_tensors.end()) { + // print_ggml_tensor(it.second, true); //[3072, 21504, 1, 1] + // find qkv and mlp up parts in LoRA model + auto split_k_d_name = fk + "attn.to_k" + lora_downs[type] + ".weight"; + auto split_v_d_name = fk + "attn.to_v" + lora_downs[type] + ".weight"; + + auto split_q_u_name = fk + "attn.to_q" + lora_ups[type] + ".weight"; + auto split_k_u_name = fk + "attn.to_k" + lora_ups[type] + ".weight"; + auto split_v_u_name = fk + "attn.to_v" + lora_ups[type] + ".weight"; + + auto 
split_m_d_name = fk + "proj_mlp" + lora_downs[type] + ".weight"; + auto split_m_u_name = fk + "proj_mlp" + lora_ups[type] + ".weight"; + + auto split_q_scale_name = fk + "attn.to_q" + ".scale"; + auto split_k_scale_name = fk + "attn.to_k" + ".scale"; + auto split_v_scale_name = fk + "attn.to_v" + ".scale"; + auto split_m_scale_name = fk + "proj_mlp" + ".scale"; + + auto split_q_alpha_name = fk + "attn.to_q" + ".alpha"; + auto split_k_alpha_name = fk + "attn.to_k" + ".alpha"; + auto split_v_alpha_name = fk + "attn.to_v" + ".alpha"; + auto split_m_alpha_name = fk + "proj_mlp" + ".alpha"; + + ggml_tensor* lora_q_down = NULL; + ggml_tensor* lora_q_up = NULL; + ggml_tensor* lora_k_down = NULL; + ggml_tensor* lora_k_up = NULL; + ggml_tensor* lora_v_down = NULL; + ggml_tensor* lora_v_up = NULL; + + ggml_tensor* lora_m_down = NULL; + ggml_tensor* lora_m_up = NULL; + + lora_q_up = to_f32(compute_ctx, lora_tensors[split_q_u_name]); + + if (lora_tensors.find(split_q_d_name) != lora_tensors.end()) { + lora_q_down = to_f32(compute_ctx, lora_tensors[split_q_d_name]); + } + + if (lora_tensors.find(split_q_u_name) != lora_tensors.end()) { + lora_q_up = to_f32(compute_ctx, lora_tensors[split_q_u_name]); + } + + if (lora_tensors.find(split_k_d_name) != lora_tensors.end()) { + lora_k_down = to_f32(compute_ctx, lora_tensors[split_k_d_name]); + } + + if (lora_tensors.find(split_k_u_name) != lora_tensors.end()) { + lora_k_up = to_f32(compute_ctx, lora_tensors[split_k_u_name]); + } + + if (lora_tensors.find(split_v_d_name) != lora_tensors.end()) { + lora_v_down = to_f32(compute_ctx, lora_tensors[split_v_d_name]); + } + + if (lora_tensors.find(split_v_u_name) != lora_tensors.end()) { + lora_v_up = to_f32(compute_ctx, lora_tensors[split_v_u_name]); + } + + if (lora_tensors.find(split_m_d_name) != lora_tensors.end()) { + lora_m_down = to_f32(compute_ctx, lora_tensors[split_m_d_name]); + } + + if (lora_tensors.find(split_m_u_name) != lora_tensors.end()) { + lora_m_up = to_f32(compute_ctx, 
lora_tensors[split_m_u_name]); + } + + float q_rank = lora_q_up->ne[0]; + float k_rank = lora_k_up->ne[0]; + float v_rank = lora_v_up->ne[0]; + float m_rank = lora_v_up->ne[0]; + + float lora_q_scale = 1; + float lora_k_scale = 1; + float lora_v_scale = 1; + float lora_m_scale = 1; + + if (lora_tensors.find(split_q_scale_name) != lora_tensors.end()) { + lora_q_scale = ggml_backend_tensor_get_f32(lora_tensors[split_q_scale_name]); + applied_lora_tensors.insert(split_q_scale_name); + } + if (lora_tensors.find(split_k_scale_name) != lora_tensors.end()) { + lora_k_scale = ggml_backend_tensor_get_f32(lora_tensors[split_k_scale_name]); + applied_lora_tensors.insert(split_k_scale_name); + } + if (lora_tensors.find(split_v_scale_name) != lora_tensors.end()) { + lora_v_scale = ggml_backend_tensor_get_f32(lora_tensors[split_v_scale_name]); + applied_lora_tensors.insert(split_v_scale_name); + } + if (lora_tensors.find(split_m_scale_name) != lora_tensors.end()) { + lora_m_scale = ggml_backend_tensor_get_f32(lora_tensors[split_m_scale_name]); + applied_lora_tensors.insert(split_m_scale_name); + } + + if (lora_tensors.find(split_q_alpha_name) != lora_tensors.end()) { + float lora_q_alpha = ggml_backend_tensor_get_f32(lora_tensors[split_q_alpha_name]); + applied_lora_tensors.insert(split_q_alpha_name); + lora_q_scale = lora_q_alpha / q_rank; + } + if (lora_tensors.find(split_k_alpha_name) != lora_tensors.end()) { + float lora_k_alpha = ggml_backend_tensor_get_f32(lora_tensors[split_k_alpha_name]); + applied_lora_tensors.insert(split_k_alpha_name); + lora_k_scale = lora_k_alpha / k_rank; + } + if (lora_tensors.find(split_v_alpha_name) != lora_tensors.end()) { + float lora_v_alpha = ggml_backend_tensor_get_f32(lora_tensors[split_v_alpha_name]); + applied_lora_tensors.insert(split_v_alpha_name); + lora_v_scale = lora_v_alpha / v_rank; + } + if (lora_tensors.find(split_m_alpha_name) != lora_tensors.end()) { + float lora_m_alpha = 
ggml_backend_tensor_get_f32(lora_tensors[split_m_alpha_name]); + applied_lora_tensors.insert(split_m_alpha_name); + lora_m_scale = lora_m_alpha / m_rank; + } + + ggml_scale_inplace(compute_ctx, lora_q_down, lora_q_scale); + ggml_scale_inplace(compute_ctx, lora_k_down, lora_k_scale); + ggml_scale_inplace(compute_ctx, lora_v_down, lora_v_scale); + ggml_scale_inplace(compute_ctx, lora_m_down, lora_m_scale); + + // print_ggml_tensor(lora_q_down, true); //[3072, R, 1, 1] + // print_ggml_tensor(lora_k_down, true); //[3072, R, 1, 1] + // print_ggml_tensor(lora_v_down, true); //[3072, R, 1, 1] + // print_ggml_tensor(lora_m_down, true); //[3072, R, 1, 1] + // print_ggml_tensor(lora_q_up, true); //[R, 3072, 1, 1] + // print_ggml_tensor(lora_k_up, true); //[R, 3072, 1, 1] + // print_ggml_tensor(lora_v_up, true); //[R, 3072, 1, 1] + // print_ggml_tensor(lora_m_up, true); //[R, 12288, 1, 1] + + // these need to be stitched together this way: + // |q_up,0 ,0 ,0 | + // |0 ,k_up,0 ,0 | + // |0 ,0 ,v_up,0 | + // |0 ,0 ,0 ,m_up| + // (q_down,k_down,v_down,m_down) . 
(q ,k ,v ,m) + + // up_concat will be [21504, R*4, 1, 1] + // down_concat will be [R*4, 3072, 1, 1] + + ggml_tensor* lora_down_concat = ggml_concat(compute_ctx, ggml_concat(compute_ctx, lora_q_down, lora_k_down, 1), ggml_concat(compute_ctx, lora_v_down, lora_m_down, 1), 1); + // print_ggml_tensor(lora_down_concat, true); //[3072, R*4, 1, 1] + + // this also means that if rank is bigger than 672, it is less memory efficient to do it this way (should be fine) + // print_ggml_tensor(lora_q_up, true); //[3072, R, 1, 1] + ggml_tensor* z = ggml_dup_tensor(compute_ctx, lora_q_up); + ggml_tensor* mlp_z = ggml_dup_tensor(compute_ctx, lora_m_up); + ggml_scale(compute_ctx, z, 0); + ggml_scale(compute_ctx, mlp_z, 0); + ggml_tensor* zz = ggml_concat(compute_ctx, z, z, 1); + + ggml_tensor* q_up = ggml_concat(compute_ctx, ggml_concat(compute_ctx, lora_q_up, zz, 1), mlp_z, 1); + ggml_tensor* k_up = ggml_concat(compute_ctx, ggml_concat(compute_ctx, z, lora_k_up, 1), ggml_concat(compute_ctx, z, mlp_z, 1), 1); + ggml_tensor* v_up = ggml_concat(compute_ctx, ggml_concat(compute_ctx, zz, lora_v_up, 1), mlp_z, 1); + ggml_tensor* m_up = ggml_concat(compute_ctx, ggml_concat(compute_ctx, zz, z, 1), lora_m_up, 1); + // print_ggml_tensor(q_up, true); //[R, 21504, 1, 1] + // print_ggml_tensor(k_up, true); //[R, 21504, 1, 1] + // print_ggml_tensor(v_up, true); //[R, 21504, 1, 1] + // print_ggml_tensor(m_up, true); //[R, 21504, 1, 1] + + ggml_tensor* lora_up_concat = ggml_concat(compute_ctx, ggml_concat(compute_ctx, q_up, k_up, 0), ggml_concat(compute_ctx, v_up, m_up, 0), 0); + // print_ggml_tensor(lora_up_concat, true); //[R*4, 21504, 1, 1] + + lora_down = ggml_cont(compute_ctx, lora_down_concat); + lora_up = ggml_cont(compute_ctx, lora_up_concat); + + applied_lora_tensors.insert(split_q_u_name); + applied_lora_tensors.insert(split_k_u_name); + applied_lora_tensors.insert(split_v_u_name); + applied_lora_tensors.insert(split_m_u_name); + + applied_lora_tensors.insert(split_q_d_name); + 
applied_lora_tensors.insert(split_k_d_name); + applied_lora_tensors.insert(split_v_d_name); + applied_lora_tensors.insert(split_m_d_name); + } + } else { + lora_up_name = fk + lora_ups[type] + ".weight"; + lora_down_name = fk + lora_downs[type] + ".weight"; + lora_mid_name = fk + ".lora_mid.weight"; + + alpha_name = fk + ".alpha"; + scale_name = fk + ".scale"; + + if (lora_tensors.find(lora_up_name) != lora_tensors.end()) { + lora_up = to_f32(compute_ctx, lora_tensors[lora_up_name]); + } + + if (lora_tensors.find(lora_down_name) != lora_tensors.end()) { + lora_down = to_f32(compute_ctx, lora_tensors[lora_down_name]); + } + + if (lora_tensors.find(lora_mid_name) != lora_tensors.end()) { + lora_mid = to_f32(compute_ctx, lora_tensors[lora_mid_name]); + applied_lora_tensors.insert(lora_mid_name); + } + + applied_lora_tensors.insert(lora_up_name); + applied_lora_tensors.insert(lora_down_name); + applied_lora_tensors.insert(alpha_name); + applied_lora_tensors.insert(scale_name); + } + + if (lora_up == NULL || lora_down == NULL) { + continue; + } + // calc_scale + // TODO: .dora_scale? 
+ int64_t rank = lora_down->ne[ggml_n_dims(lora_down) - 1]; + if (lora_tensors.find(scale_name) != lora_tensors.end()) { + scale_value = ggml_backend_tensor_get_f32(lora_tensors[scale_name]); + } else if (lora_tensors.find(alpha_name) != lora_tensors.end()) { + float alpha = ggml_backend_tensor_get_f32(lora_tensors[alpha_name]); + scale_value = alpha / rank; + } + + updown = ggml_merge_lora(compute_ctx, lora_down, lora_up, lora_mid); + } + scale_value *= multiplier; + updown = ggml_reshape(compute_ctx, updown, weight); + GGML_ASSERT(ggml_nelements(updown) == ggml_nelements(weight)); + updown = ggml_scale_inplace(compute_ctx, updown, scale_value); + ggml_tensor* final_weight; + if (weight->type != GGML_TYPE_F32 && weight->type != GGML_TYPE_F16) { + // final_weight = ggml_new_tensor(compute_ctx, GGML_TYPE_F32, ggml_n_dims(weight), weight->ne); + // final_weight = ggml_cpy(compute_ctx, weight, final_weight); + final_weight = to_f32(compute_ctx, weight); + final_weight = ggml_add_inplace(compute_ctx, final_weight, updown); + final_weight = ggml_cpy(compute_ctx, final_weight, weight); + } else { + final_weight = ggml_add_inplace(compute_ctx, weight, updown); + } + // final_weight = ggml_add_inplace(compute_ctx, weight, updown); // apply directly + ggml_build_forward_expand(gf, final_weight); + break; + } + } + size_t total_lora_tensors_count = 0; + size_t applied_lora_tensors_count = 0; + + for (auto& kv : lora_tensors) { + total_lora_tensors_count++; + if (applied_lora_tensors.find(kv.first) == applied_lora_tensors.end()) { + LOG_WARN("unused lora tensor |%s|", kv.first.c_str()); + print_ggml_tensor(kv.second, true); + // exit(0); + } else { + applied_lora_tensors_count++; + } + } + /* Don't worry if this message shows up twice in the logs per LoRA, + * this function is called once to calculate the required buffer size + * and then again to actually generate a graph to be used */ + if (applied_lora_tensors_count != total_lora_tensors_count) { + LOG_WARN("Only (%lu / 
%lu) LoRA tensors have been applied", + applied_lora_tensors_count, total_lora_tensors_count); + } else { + LOG_DEBUG("(%lu / %lu) LoRA tensors applied successfully", + applied_lora_tensors_count, total_lora_tensors_count); + } + + return gf; + } + + void apply(std::map model_tensors, SDVersion version, int n_threads) { + auto get_graph = [&]() -> struct ggml_cgraph* { + return build_lora_graph(model_tensors, version); + }; + GGMLRunner::compute(get_graph, n_threads, true); + } +}; + +#endif // __LORA_HPP__ diff --git a/mmdit.hpp b/mmdit.hpp new file mode 100644 index 000000000..dee7b1c49 --- /dev/null +++ b/mmdit.hpp @@ -0,0 +1,1002 @@ +#ifndef __MMDIT_HPP__ +#define __MMDIT_HPP__ + +#include "ggml_extend.hpp" +#include "model.h" + +#define MMDIT_GRAPH_SIZE 10240 + +struct Mlp : public GGMLBlock { +public: + Mlp(int64_t in_features, + int64_t hidden_features = -1, + int64_t out_features = -1, + bool bias = true) { + // act_layer is always lambda: nn.GELU(approximate="tanh") + // norm_layer is always None + // use_conv is always False + if (hidden_features == -1) { + hidden_features = in_features; + } + if (out_features == -1) { + out_features = in_features; + } + blocks["fc1"] = std::shared_ptr(new Linear(in_features, hidden_features, bias)); + blocks["fc2"] = std::shared_ptr(new Linear(hidden_features, out_features, bias)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [N, n_token, in_features] + auto fc1 = std::dynamic_pointer_cast(blocks["fc1"]); + auto fc2 = std::dynamic_pointer_cast(blocks["fc2"]); + + x = fc1->forward(ctx, x); + x = ggml_gelu_inplace(ctx, x); + x = fc2->forward(ctx, x); + return x; + } +}; + +struct PatchEmbed : public GGMLBlock { + // 2D Image to Patch Embedding +protected: + bool flatten; + bool dynamic_img_pad; + int patch_size; + +public: + PatchEmbed(int64_t img_size = 224, + int patch_size = 16, + int64_t in_chans = 3, + int64_t embed_dim = 1536, + bool bias = true, + bool flatten = 
true, + bool dynamic_img_pad = true) + : patch_size(patch_size), + flatten(flatten), + dynamic_img_pad(dynamic_img_pad) { + // img_size is always None + // patch_size is always 2 + // in_chans is always 16 + // norm_layer is always False + // strict_img_size is always true, but not used + + blocks["proj"] = std::shared_ptr(new Conv2d(in_chans, + embed_dim, + {patch_size, patch_size}, + {patch_size, patch_size}, + {0, 0}, + {1, 1}, + bias)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [N, C, H, W] + // return: [N, H*W, embed_dim] + auto proj = std::dynamic_pointer_cast(blocks["proj"]); + + if (dynamic_img_pad) { + int64_t W = x->ne[0]; + int64_t H = x->ne[1]; + int pad_h = (patch_size - H % patch_size) % patch_size; + int pad_w = (patch_size - W % patch_size) % patch_size; + x = ggml_pad(ctx, x, pad_w, pad_h, 0, 0); // TODO: reflect pad mode + } + x = proj->forward(ctx, x); + + if (flatten) { + x = ggml_reshape_3d(ctx, x, x->ne[0] * x->ne[1], x->ne[2], x->ne[3]); + x = ggml_cont(ctx, ggml_permute(ctx, x, 1, 0, 2, 3)); + } + return x; + } +}; + +struct TimestepEmbedder : public GGMLBlock { + // Embeds scalar timesteps into vector representations. 
+protected: + int64_t frequency_embedding_size; + +public: + TimestepEmbedder(int64_t hidden_size, + int64_t frequency_embedding_size = 256) + : frequency_embedding_size(frequency_embedding_size) { + blocks["mlp.0"] = std::shared_ptr(new Linear(frequency_embedding_size, hidden_size, true, true)); + blocks["mlp.2"] = std::shared_ptr(new Linear(hidden_size, hidden_size, true, true)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* t) { + // t: [N, ] + // return: [N, hidden_size] + auto mlp_0 = std::dynamic_pointer_cast(blocks["mlp.0"]); + auto mlp_2 = std::dynamic_pointer_cast(blocks["mlp.2"]); + + auto t_freq = ggml_nn_timestep_embedding(ctx, t, frequency_embedding_size); // [N, frequency_embedding_size] + + auto t_emb = mlp_0->forward(ctx, t_freq); + t_emb = ggml_silu_inplace(ctx, t_emb); + t_emb = mlp_2->forward(ctx, t_emb); + return t_emb; + } +}; + +struct VectorEmbedder : public GGMLBlock { + // Embeds a flat vector of dimension input_dim +public: + VectorEmbedder(int64_t input_dim, + int64_t hidden_size) { + blocks["mlp.0"] = std::shared_ptr(new Linear(input_dim, hidden_size, true, true)); + blocks["mlp.2"] = std::shared_ptr(new Linear(hidden_size, hidden_size, true, true)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [N, input_dim] + // return: [N, hidden_size] + auto mlp_0 = std::dynamic_pointer_cast(blocks["mlp.0"]); + auto mlp_2 = std::dynamic_pointer_cast(blocks["mlp.2"]); + + x = mlp_0->forward(ctx, x); + x = ggml_silu_inplace(ctx, x); + x = mlp_2->forward(ctx, x); + return x; + } +}; + +class RMSNorm : public UnaryBlock { +protected: + int64_t hidden_size; + float eps; + + void init_params(struct ggml_context* ctx, std::map& tensor_types, std::string prefix = "") { + enum ggml_type wtype = GGML_TYPE_F32; //(tensor_types.find(prefix + "weight") != tensor_types.end()) ? 
tensor_types[prefix + "weight"] : GGML_TYPE_F32; + params["weight"] = ggml_new_tensor_1d(ctx, wtype, hidden_size); + } + +public: + RMSNorm(int64_t hidden_size, + float eps = 1e-06f) + : hidden_size(hidden_size), + eps(eps) {} + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + struct ggml_tensor* w = params["weight"]; + x = ggml_rms_norm(ctx, x, eps); + x = ggml_mul(ctx, x, w); + return x; + } +}; + +class SelfAttention : public GGMLBlock { +public: + int64_t num_heads; + bool pre_only; + std::string qk_norm; + +public: + SelfAttention(int64_t dim, + int64_t num_heads = 8, + std::string qk_norm = "", + bool qkv_bias = false, + bool pre_only = false) + : num_heads(num_heads), pre_only(pre_only), qk_norm(qk_norm) { + int64_t d_head = dim / num_heads; + blocks["qkv"] = std::shared_ptr(new Linear(dim, dim * 3, qkv_bias)); + if (!pre_only) { + blocks["proj"] = std::shared_ptr(new Linear(dim, dim)); + } + if (qk_norm == "rms") { + blocks["ln_q"] = std::shared_ptr(new RMSNorm(d_head, 1.0e-6)); + blocks["ln_k"] = std::shared_ptr(new RMSNorm(d_head, 1.0e-6)); + } else if (qk_norm == "ln") { + blocks["ln_q"] = std::shared_ptr(new LayerNorm(d_head, 1.0e-6)); + blocks["ln_k"] = std::shared_ptr(new LayerNorm(d_head, 1.0e-6)); + } + } + + std::vector pre_attention(struct ggml_context* ctx, struct ggml_tensor* x) { + auto qkv_proj = std::dynamic_pointer_cast(blocks["qkv"]); + + auto qkv = qkv_proj->forward(ctx, x); + auto qkv_vec = split_qkv(ctx, qkv); + int64_t head_dim = qkv_vec[0]->ne[0] / num_heads; + auto q = ggml_reshape_4d(ctx, qkv_vec[0], head_dim, num_heads, qkv_vec[0]->ne[1], qkv_vec[0]->ne[2]); // [N, n_token, n_head, d_head] + auto k = ggml_reshape_4d(ctx, qkv_vec[1], head_dim, num_heads, qkv_vec[1]->ne[1], qkv_vec[1]->ne[2]); // [N, n_token, n_head, d_head] + auto v = qkv_vec[2]; // [N, n_token, n_head*d_head] + + if (qk_norm == "rms" || qk_norm == "ln") { + auto ln_q = std::dynamic_pointer_cast(blocks["ln_q"]); + auto ln_k = 
std::dynamic_pointer_cast(blocks["ln_k"]); + q = ln_q->forward(ctx, q); + k = ln_k->forward(ctx, k); + } + + q = ggml_reshape_3d(ctx, q, q->ne[0] * q->ne[1], q->ne[2], q->ne[3]); // [N, n_token, n_head*d_head] + k = ggml_reshape_3d(ctx, k, k->ne[0] * k->ne[1], k->ne[2], k->ne[3]); // [N, n_token, n_head*d_head] + + return {q, k, v}; + } + + struct ggml_tensor* post_attention(struct ggml_context* ctx, struct ggml_tensor* x) { + GGML_ASSERT(!pre_only); + + auto proj = std::dynamic_pointer_cast(blocks["proj"]); + + x = proj->forward(ctx, x); // [N, n_token, dim] + return x; + } + + // x: [N, n_token, dim] + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + auto qkv = pre_attention(ctx, x); + x = ggml_nn_attention_ext(ctx, qkv[0], qkv[1], qkv[2], num_heads); // [N, n_token, dim] + x = post_attention(ctx, x); // [N, n_token, dim] + return x; + } +}; + +__STATIC_INLINE__ struct ggml_tensor* modulate(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* shift, + struct ggml_tensor* scale) { + // x: [N, L, C] + // scale: [N, C] + // shift: [N, C] + scale = ggml_reshape_3d(ctx, scale, scale->ne[0], 1, scale->ne[1]); // [N, 1, C] + shift = ggml_reshape_3d(ctx, shift, shift->ne[0], 1, shift->ne[1]); // [N, 1, C] + x = ggml_add(ctx, x, ggml_mul(ctx, x, scale)); + x = ggml_add(ctx, x, shift); + return x; +} + +struct DismantledBlock : public GGMLBlock { + // A DiT block with gated adaptive layer norm (adaLN) conditioning. 
+public: + int64_t num_heads; + bool pre_only; + bool self_attn; + +public: + DismantledBlock(int64_t hidden_size, + int64_t num_heads, + float mlp_ratio = 4.0, + std::string qk_norm = "", + bool qkv_bias = false, + bool pre_only = false, + bool self_attn = false) + : num_heads(num_heads), pre_only(pre_only), self_attn(self_attn) { + // rmsnorm is always False + // scale_mod_only is always False + // swiglu is always False + blocks["norm1"] = std::shared_ptr(new LayerNorm(hidden_size, 1e-06f, false)); + blocks["attn"] = std::shared_ptr(new SelfAttention(hidden_size, num_heads, qk_norm, qkv_bias, pre_only)); + + if (self_attn) { + blocks["attn2"] = std::shared_ptr(new SelfAttention(hidden_size, num_heads, qk_norm, qkv_bias, false)); + } + + if (!pre_only) { + blocks["norm2"] = std::shared_ptr(new LayerNorm(hidden_size, 1e-06f, false)); + int64_t mlp_hidden_dim = (int64_t)(hidden_size * mlp_ratio); + blocks["mlp"] = std::shared_ptr(new Mlp(hidden_size, mlp_hidden_dim)); + } + + int64_t n_mods = 6; + if (pre_only) { + n_mods = 2; + } + if (self_attn) { + n_mods = 9; + } + blocks["adaLN_modulation.1"] = std::shared_ptr(new Linear(hidden_size, n_mods * hidden_size)); + } + + std::tuple, std::vector, std::vector> pre_attention_x(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* c) { + GGML_ASSERT(self_attn); + // x: [N, n_token, hidden_size] + // c: [N, hidden_size] + auto norm1 = std::dynamic_pointer_cast(blocks["norm1"]); + auto attn = std::dynamic_pointer_cast(blocks["attn"]); + auto attn2 = std::dynamic_pointer_cast(blocks["attn2"]); + auto adaLN_modulation_1 = std::dynamic_pointer_cast(blocks["adaLN_modulation.1"]); + + int64_t n_mods = 9; + auto m = adaLN_modulation_1->forward(ctx, ggml_silu(ctx, c)); // [N, n_mods * hidden_size] + m = ggml_reshape_3d(ctx, m, c->ne[0], n_mods, c->ne[1]); // [N, n_mods, hidden_size] + m = ggml_cont(ctx, ggml_permute(ctx, m, 0, 2, 1, 3)); // [n_mods, N, hidden_size] + + int64_t offset = m->nb[1] * m->ne[1]; + 
auto shift_msa = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 0); // [N, hidden_size] + auto scale_msa = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 1); // [N, hidden_size] + auto gate_msa = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 2); // [N, hidden_size] + + auto shift_mlp = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 3); // [N, hidden_size] + auto scale_mlp = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 4); // [N, hidden_size] + auto gate_mlp = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 5); // [N, hidden_size] + + auto shift_msa2 = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 6); // [N, hidden_size] + auto scale_msa2 = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 7); // [N, hidden_size] + auto gate_msa2 = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 8); // [N, hidden_size] + + auto x_norm = norm1->forward(ctx, x); + + auto attn_in = modulate(ctx, x_norm, shift_msa, scale_msa); + auto qkv = attn->pre_attention(ctx, attn_in); + + auto attn2_in = modulate(ctx, x_norm, shift_msa2, scale_msa2); + auto qkv2 = attn2->pre_attention(ctx, attn2_in); + + return {qkv, qkv2, {x, gate_msa, shift_mlp, scale_mlp, gate_mlp, gate_msa2}}; + } + + std::pair, std::vector> pre_attention(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* c) { + // x: [N, n_token, hidden_size] + // c: [N, hidden_size] + auto norm1 = std::dynamic_pointer_cast(blocks["norm1"]); + auto attn = std::dynamic_pointer_cast(blocks["attn"]); + auto adaLN_modulation_1 = std::dynamic_pointer_cast(blocks["adaLN_modulation.1"]); + + int64_t n_mods = 6; + if (pre_only) { + n_mods = 2; + } + auto m = adaLN_modulation_1->forward(ctx, ggml_silu(ctx, c)); // [N, n_mods * hidden_size] + m = ggml_reshape_3d(ctx, m, c->ne[0], n_mods, c->ne[1]); // [N, n_mods, hidden_size] + m = ggml_cont(ctx, ggml_permute(ctx, m, 0, 2, 1, 3)); // [n_mods, N, hidden_size] + + 
int64_t offset = m->nb[1] * m->ne[1]; + auto shift_msa = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 0); // [N, hidden_size] + auto scale_msa = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 1); // [N, hidden_size] + if (!pre_only) { + auto gate_msa = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 2); // [N, hidden_size] + auto shift_mlp = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 3); // [N, hidden_size] + auto scale_mlp = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 4); // [N, hidden_size] + auto gate_mlp = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 5); // [N, hidden_size] + + auto attn_in = modulate(ctx, norm1->forward(ctx, x), shift_msa, scale_msa); + + auto qkv = attn->pre_attention(ctx, attn_in); + + return {qkv, {x, gate_msa, shift_mlp, scale_mlp, gate_mlp}}; + } else { + auto attn_in = modulate(ctx, norm1->forward(ctx, x), shift_msa, scale_msa); + auto qkv = attn->pre_attention(ctx, attn_in); + + return {qkv, {NULL, NULL, NULL, NULL, NULL}}; + } + } + + struct ggml_tensor* post_attention_x(struct ggml_context* ctx, + struct ggml_tensor* attn_out, + struct ggml_tensor* attn2_out, + struct ggml_tensor* x, + struct ggml_tensor* gate_msa, + struct ggml_tensor* shift_mlp, + struct ggml_tensor* scale_mlp, + struct ggml_tensor* gate_mlp, + struct ggml_tensor* gate_msa2) { + // attn_out: [N, n_token, hidden_size] + // x: [N, n_token, hidden_size] + // gate_msa: [N, hidden_size] + // shift_mlp: [N, hidden_size] + // scale_mlp: [N, hidden_size] + // gate_mlp: [N, hidden_size] + // return: [N, n_token, hidden_size] + GGML_ASSERT(!pre_only); + + auto attn = std::dynamic_pointer_cast(blocks["attn"]); + auto attn2 = std::dynamic_pointer_cast(blocks["attn2"]); + auto norm2 = std::dynamic_pointer_cast(blocks["norm2"]); + auto mlp = std::dynamic_pointer_cast(blocks["mlp"]); + + gate_msa = ggml_reshape_3d(ctx, gate_msa, gate_msa->ne[0], 1, gate_msa->ne[1]); // [N, 1, hidden_size] + 
gate_mlp = ggml_reshape_3d(ctx, gate_mlp, gate_mlp->ne[0], 1, gate_mlp->ne[1]); // [N, 1, hidden_size] + gate_msa2 = ggml_reshape_3d(ctx, gate_msa2, gate_msa2->ne[0], 1, gate_msa2->ne[1]); // [N, 1, hidden_size] + + attn_out = attn->post_attention(ctx, attn_out); + attn2_out = attn2->post_attention(ctx, attn2_out); + + x = ggml_add(ctx, x, ggml_mul(ctx, attn_out, gate_msa)); + x = ggml_add(ctx, x, ggml_mul(ctx, attn2_out, gate_msa2)); + auto mlp_out = mlp->forward(ctx, modulate(ctx, norm2->forward(ctx, x), shift_mlp, scale_mlp)); + x = ggml_add(ctx, x, ggml_mul(ctx, mlp_out, gate_mlp)); + + return x; + } + + struct ggml_tensor* post_attention(struct ggml_context* ctx, + struct ggml_tensor* attn_out, + struct ggml_tensor* x, + struct ggml_tensor* gate_msa, + struct ggml_tensor* shift_mlp, + struct ggml_tensor* scale_mlp, + struct ggml_tensor* gate_mlp) { + // attn_out: [N, n_token, hidden_size] + // x: [N, n_token, hidden_size] + // gate_msa: [N, hidden_size] + // shift_mlp: [N, hidden_size] + // scale_mlp: [N, hidden_size] + // gate_mlp: [N, hidden_size] + // return: [N, n_token, hidden_size] + GGML_ASSERT(!pre_only); + + auto attn = std::dynamic_pointer_cast(blocks["attn"]); + auto norm2 = std::dynamic_pointer_cast(blocks["norm2"]); + auto mlp = std::dynamic_pointer_cast(blocks["mlp"]); + + gate_msa = ggml_reshape_3d(ctx, gate_msa, gate_msa->ne[0], 1, gate_msa->ne[1]); // [N, 1, hidden_size] + gate_mlp = ggml_reshape_3d(ctx, gate_mlp, gate_mlp->ne[0], 1, gate_mlp->ne[1]); // [N, 1, hidden_size] + + attn_out = attn->post_attention(ctx, attn_out); + + x = ggml_add(ctx, x, ggml_mul(ctx, attn_out, gate_msa)); + auto mlp_out = mlp->forward(ctx, modulate(ctx, norm2->forward(ctx, x), shift_mlp, scale_mlp)); + x = ggml_add(ctx, x, ggml_mul(ctx, mlp_out, gate_mlp)); + + return x; + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x, struct ggml_tensor* c) { + // x: [N, n_token, hidden_size] + // c: [N, hidden_size] + // return: [N, n_token, 
hidden_size] + + auto attn = std::dynamic_pointer_cast(blocks["attn"]); + if (self_attn) { + auto qkv_intermediates = pre_attention_x(ctx, x, c); + // auto qkv = qkv_intermediates.first; + // auto intermediates = qkv_intermediates.second; + // no longer a pair, but a tuple + auto qkv = std::get<0>(qkv_intermediates); + auto qkv2 = std::get<1>(qkv_intermediates); + auto intermediates = std::get<2>(qkv_intermediates); + + auto attn_out = ggml_nn_attention_ext(ctx, qkv[0], qkv[1], qkv[2], num_heads); // [N, n_token, dim] + auto attn2_out = ggml_nn_attention_ext(ctx, qkv2[0], qkv2[1], qkv2[2], num_heads); // [N, n_token, dim] + x = post_attention_x(ctx, + attn_out, + attn2_out, + intermediates[0], + intermediates[1], + intermediates[2], + intermediates[3], + intermediates[4], + intermediates[5]); + return x; // [N, n_token, dim] + } else { + auto qkv_intermediates = pre_attention(ctx, x, c); + auto qkv = qkv_intermediates.first; + auto intermediates = qkv_intermediates.second; + + auto attn_out = ggml_nn_attention_ext(ctx, qkv[0], qkv[1], qkv[2], num_heads); // [N, n_token, dim] + x = post_attention(ctx, + attn_out, + intermediates[0], + intermediates[1], + intermediates[2], + intermediates[3], + intermediates[4]); + return x; // [N, n_token, dim] + } + } +}; + +__STATIC_INLINE__ std::pair +block_mixing(struct ggml_context* ctx, + struct ggml_tensor* context, + struct ggml_tensor* x, + struct ggml_tensor* c, + std::shared_ptr context_block, + std::shared_ptr x_block) { + // context: [N, n_context, hidden_size] + // x: [N, n_token, hidden_size] + // c: [N, hidden_size] + auto context_qkv_intermediates = context_block->pre_attention(ctx, context, c); + auto context_qkv = context_qkv_intermediates.first; + auto context_intermediates = context_qkv_intermediates.second; + + std::vector x_qkv, x_qkv2, x_intermediates; + + if (x_block->self_attn) { + auto x_qkv_intermediates = x_block->pre_attention_x(ctx, x, c); + x_qkv = std::get<0>(x_qkv_intermediates); + x_qkv2 = 
std::get<1>(x_qkv_intermediates); + x_intermediates = std::get<2>(x_qkv_intermediates); + } else { + auto x_qkv_intermediates = x_block->pre_attention(ctx, x, c); + x_qkv = x_qkv_intermediates.first; + x_intermediates = x_qkv_intermediates.second; + } + std::vector qkv; + for (int i = 0; i < 3; i++) { + qkv.push_back(ggml_concat(ctx, context_qkv[i], x_qkv[i], 1)); + } + + auto attn = ggml_nn_attention_ext(ctx, qkv[0], qkv[1], qkv[2], x_block->num_heads); // [N, n_context + n_token, hidden_size] + attn = ggml_cont(ctx, ggml_permute(ctx, attn, 0, 2, 1, 3)); // [n_context + n_token, N, hidden_size] + auto context_attn = ggml_view_3d(ctx, + attn, + attn->ne[0], + attn->ne[1], + context->ne[1], + attn->nb[1], + attn->nb[2], + 0); // [n_context, N, hidden_size] + context_attn = ggml_cont(ctx, ggml_permute(ctx, context_attn, 0, 2, 1, 3)); // [N, n_context, hidden_size] + auto x_attn = ggml_view_3d(ctx, + attn, + attn->ne[0], + attn->ne[1], + x->ne[1], + attn->nb[1], + attn->nb[2], + attn->nb[2] * context->ne[1]); // [n_token, N, hidden_size] + x_attn = ggml_cont(ctx, ggml_permute(ctx, x_attn, 0, 2, 1, 3)); // [N, n_token, hidden_size] + + if (!context_block->pre_only) { + context = context_block->post_attention(ctx, + context_attn, + context_intermediates[0], + context_intermediates[1], + context_intermediates[2], + context_intermediates[3], + context_intermediates[4]); + } else { + context = NULL; + } + + if (x_block->self_attn) { + auto attn2 = ggml_nn_attention_ext(ctx, x_qkv2[0], x_qkv2[1], x_qkv2[2], x_block->num_heads); // [N, n_token, hidden_size] + + x = x_block->post_attention_x(ctx, + x_attn, + attn2, + x_intermediates[0], + x_intermediates[1], + x_intermediates[2], + x_intermediates[3], + x_intermediates[4], + x_intermediates[5]); + } else { + x = x_block->post_attention(ctx, + x_attn, + x_intermediates[0], + x_intermediates[1], + x_intermediates[2], + x_intermediates[3], + x_intermediates[4]); + } + + return {context, x}; +} + +struct JointBlock : public 
GGMLBlock { +public: + JointBlock(int64_t hidden_size, + int64_t num_heads, + float mlp_ratio = 4.0, + std::string qk_norm = "", + bool qkv_bias = false, + bool pre_only = false, + bool self_attn_x = false) { + blocks["context_block"] = std::shared_ptr(new DismantledBlock(hidden_size, num_heads, mlp_ratio, qk_norm, qkv_bias, pre_only)); + blocks["x_block"] = std::shared_ptr(new DismantledBlock(hidden_size, num_heads, mlp_ratio, qk_norm, qkv_bias, false, self_attn_x)); + } + + std::pair forward(struct ggml_context* ctx, + struct ggml_tensor* context, + struct ggml_tensor* x, + struct ggml_tensor* c) { + auto context_block = std::dynamic_pointer_cast(blocks["context_block"]); + auto x_block = std::dynamic_pointer_cast(blocks["x_block"]); + + return block_mixing(ctx, context, x, c, context_block, x_block); + } +}; + +struct FinalLayer : public GGMLBlock { + // The final layer of DiT. +public: + FinalLayer(int64_t hidden_size, + int64_t patch_size, + int64_t out_channels) { + // total_out_channels is always None + blocks["norm_final"] = std::shared_ptr(new LayerNorm(hidden_size, 1e-06f, false)); + blocks["linear"] = std::shared_ptr(new Linear(hidden_size, patch_size * patch_size * out_channels, true, true)); + blocks["adaLN_modulation.1"] = std::shared_ptr(new Linear(hidden_size, 2 * hidden_size)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* c) { + // x: [N, n_token, hidden_size] + // c: [N, hidden_size] + // return: [N, n_token, patch_size * patch_size * out_channels] + auto norm_final = std::dynamic_pointer_cast(blocks["norm_final"]); + auto linear = std::dynamic_pointer_cast(blocks["linear"]); + auto adaLN_modulation_1 = std::dynamic_pointer_cast(blocks["adaLN_modulation.1"]); + + auto m = adaLN_modulation_1->forward(ctx, ggml_silu(ctx, c)); // [N, 2 * hidden_size] + m = ggml_reshape_3d(ctx, m, c->ne[0], 2, c->ne[1]); // [N, 2, hidden_size] + m = ggml_cont(ctx, ggml_permute(ctx, m, 0, 2, 1, 3)); // [2, 
N, hidden_size] + + int64_t offset = m->nb[1] * m->ne[1]; + auto shift = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 0); // [N, hidden_size] + auto scale = ggml_view_2d(ctx, m, m->ne[0], m->ne[1], m->nb[1], offset * 1); // [N, hidden_size] + + x = modulate(ctx, norm_final->forward(ctx, x), shift, scale); + x = linear->forward(ctx, x); + + return x; + } +}; + +struct MMDiT : public GGMLBlock { + // Diffusion model with a Transformer backbone. +protected: + int64_t input_size = -1; + int64_t patch_size = 2; + int64_t in_channels = 16; + int64_t d_self = -1; // >=0 for MMdiT-X + int64_t depth = 24; + float mlp_ratio = 4.0f; + int64_t adm_in_channels = 2048; + int64_t out_channels = 16; + int64_t pos_embed_max_size = 192; + int64_t num_patchs = 36864; // 192 * 192 + int64_t context_size = 4096; + int64_t context_embedder_out_dim = 1536; + int64_t hidden_size; + std::string qk_norm; + + void init_params(struct ggml_context* ctx, std::map& tensor_types, std::string prefix = "") { + enum ggml_type wtype = GGML_TYPE_F32; //(tensor_types.find(prefix + "pos_embed") != tensor_types.end()) ? 
tensor_types[prefix + "pos_embed"] : GGML_TYPE_F32; + params["pos_embed"] = ggml_new_tensor_3d(ctx, wtype, hidden_size, num_patchs, 1); + } + +public: + MMDiT(std::map& tensor_types) { + // input_size is always None + // learn_sigma is always False + // register_length is always 0 + // rmsnorm is always False + // scale_mod_only is always False + // swiglu is always False + // qkv_bias is always True + // context_processor_layers is always None + // pos_embed_scaling_factor is not used + // pos_embed_offset is not used + // context_embedder_config is always {'target': 'torch.nn.Linear', 'params': {'in_features': 4096, 'out_features': 1536}} + + // read tensors from tensor_types + for (auto pair : tensor_types) { + std::string tensor_name = pair.first; + if (tensor_name.find("model.diffusion_model.") == std::string::npos) + continue; + size_t jb = tensor_name.find("joint_blocks."); + if (jb != std::string::npos) { + tensor_name = tensor_name.substr(jb); // remove prefix + int block_depth = atoi(tensor_name.substr(13, tensor_name.find(".", 13)).c_str()); + if (block_depth + 1 > depth) { + depth = block_depth + 1; + } + if (tensor_name.find("attn.ln") != std::string::npos) { + if (tensor_name.find(".bias") != std::string::npos) { + qk_norm = "ln"; + } else { + qk_norm = "rms"; + } + } + if (tensor_name.find("attn2") != std::string::npos) { + if (block_depth > d_self) { + d_self = block_depth; + } + } + } + } + + if (d_self >= 0) { + pos_embed_max_size *= 2; + num_patchs *= 4; + } + + LOG_INFO("MMDiT layers: %d (including %d MMDiT-x layers)", depth, d_self + 1); + + int64_t default_out_channels = in_channels; + hidden_size = 64 * depth; + context_embedder_out_dim = 64 * depth; + int64_t num_heads = depth; + + blocks["x_embedder"] = std::shared_ptr(new PatchEmbed(input_size, patch_size, in_channels, hidden_size, true)); + blocks["t_embedder"] = std::shared_ptr(new TimestepEmbedder(hidden_size)); + + if (adm_in_channels != -1) { + blocks["y_embedder"] = 
std::shared_ptr(new VectorEmbedder(adm_in_channels, hidden_size)); + } + + blocks["context_embedder"] = std::shared_ptr(new Linear(4096, context_embedder_out_dim, true, true)); + + for (int i = 0; i < depth; i++) { + blocks["joint_blocks." + std::to_string(i)] = std::shared_ptr(new JointBlock(hidden_size, + num_heads, + mlp_ratio, + qk_norm, + true, + i == depth - 1, + i <= d_self)); + } + + blocks["final_layer"] = std::shared_ptr(new FinalLayer(hidden_size, patch_size, out_channels)); + } + + struct ggml_tensor* + cropped_pos_embed(struct ggml_context* ctx, + int64_t h, + int64_t w) { + auto pos_embed = params["pos_embed"]; + + h = (h + 1) / patch_size; + w = (w + 1) / patch_size; + + GGML_ASSERT(h <= pos_embed_max_size && h > 0); + GGML_ASSERT(w <= pos_embed_max_size && w > 0); + + int64_t top = (pos_embed_max_size - h) / 2; + int64_t left = (pos_embed_max_size - w) / 2; + + auto spatial_pos_embed = ggml_reshape_3d(ctx, pos_embed, hidden_size, pos_embed_max_size, pos_embed_max_size); + + // spatial_pos_embed = spatial_pos_embed[:, top : top + h, left : left + w, :] + spatial_pos_embed = ggml_view_3d(ctx, + spatial_pos_embed, + hidden_size, + pos_embed_max_size, + h, + spatial_pos_embed->nb[1], + spatial_pos_embed->nb[2], + spatial_pos_embed->nb[2] * top); // [h, pos_embed_max_size, hidden_size] + spatial_pos_embed = ggml_cont(ctx, ggml_permute(ctx, spatial_pos_embed, 0, 2, 1, 3)); // [pos_embed_max_size, h, hidden_size] + spatial_pos_embed = ggml_view_3d(ctx, + spatial_pos_embed, + hidden_size, + h, + w, + spatial_pos_embed->nb[1], + spatial_pos_embed->nb[2], + spatial_pos_embed->nb[2] * left); // [w, h, hidden_size] + spatial_pos_embed = ggml_cont(ctx, ggml_permute(ctx, spatial_pos_embed, 0, 2, 1, 3)); // [h, w, hidden_size] + spatial_pos_embed = ggml_reshape_3d(ctx, spatial_pos_embed, hidden_size, h * w, 1); // [1, h*w, hidden_size] + return spatial_pos_embed; + } + + struct ggml_tensor* unpatchify(struct ggml_context* ctx, + struct ggml_tensor* x, + int64_t h, 
+ int64_t w) { + // x: [N, H*W, patch_size * patch_size * C] + // return: [N, C, H, W] + int64_t n = x->ne[2]; + int64_t c = out_channels; + int64_t p = patch_size; + h = (h + 1) / p; + w = (w + 1) / p; + + GGML_ASSERT(h * w == x->ne[1]); + + x = ggml_reshape_4d(ctx, x, c, p * p, w * h, n); // [N, H*W, P*P, C] + x = ggml_cont(ctx, ggml_permute(ctx, x, 2, 0, 1, 3)); // [N, C, H*W, P*P] + x = ggml_reshape_4d(ctx, x, p, p, w, h * c * n); // [N*C*H, W, P, P] + x = ggml_cont(ctx, ggml_permute(ctx, x, 0, 2, 1, 3)); // [N*C*H, P, W, P] + x = ggml_reshape_4d(ctx, x, p * w, p * h, c, n); // [N, C, H*P, W*P] + return x; + } + + struct ggml_tensor* forward_core_with_concat(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* c_mod, + struct ggml_tensor* context, + std::vector skip_layers = std::vector()) { + // x: [N, H*W, hidden_size] + // context: [N, n_context, d_context] + // c: [N, hidden_size] + // return: [N, H*W, patch_size * patch_size * out_channels] + auto final_layer = std::dynamic_pointer_cast(blocks["final_layer"]); + + for (int i = 0; i < depth; i++) { + // skip iteration if i is in skip_layers + if (skip_layers.size() > 0 && std::find(skip_layers.begin(), skip_layers.end(), i) != skip_layers.end()) { + continue; + } + + auto block = std::dynamic_pointer_cast(blocks["joint_blocks." + std::to_string(i)]); + + auto context_x = block->forward(ctx, context, x, c_mod); + context = context_x.first; + x = context_x.second; + } + + x = final_layer->forward(ctx, x, c_mod); // (N, T, patch_size ** 2 * out_channels) + + return x; + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* t, + struct ggml_tensor* y = NULL, + struct ggml_tensor* context = NULL, + std::vector skip_layers = std::vector()) { + // Forward pass of DiT. 
+ // x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + // t: (N,) tensor of diffusion timesteps + // y: (N, adm_in_channels) tensor of class labels + // context: (N, L, D) + // return: (N, C, H, W) + auto x_embedder = std::dynamic_pointer_cast(blocks["x_embedder"]); + auto t_embedder = std::dynamic_pointer_cast(blocks["t_embedder"]); + + int64_t w = x->ne[0]; + int64_t h = x->ne[1]; + + auto patch_embed = x_embedder->forward(ctx, x); // [N, H*W, hidden_size] + auto pos_embed = cropped_pos_embed(ctx, h, w); // [1, H*W, hidden_size] + x = ggml_add(ctx, patch_embed, pos_embed); // [N, H*W, hidden_size] + + auto c = t_embedder->forward(ctx, t); // [N, hidden_size] + if (y != NULL && adm_in_channels != -1) { + auto y_embedder = std::dynamic_pointer_cast(blocks["y_embedder"]); + + y = y_embedder->forward(ctx, y); // [N, hidden_size] + c = ggml_add(ctx, c, y); + } + + if (context != NULL) { + auto context_embedder = std::dynamic_pointer_cast(blocks["context_embedder"]); + + context = context_embedder->forward(ctx, context); // [N, L, D] aka [N, L, 1536] + } + + x = forward_core_with_concat(ctx, x, c, context, skip_layers); // (N, H*W, patch_size ** 2 * out_channels) + + x = unpatchify(ctx, x, h, w); // [N, C, H, W] + + return x; + } +}; +struct MMDiTRunner : public GGMLRunner { + MMDiT mmdit; + + static std::map empty_tensor_types; + + MMDiTRunner(ggml_backend_t backend, + std::map& tensor_types = empty_tensor_types, + const std::string prefix = "") + : GGMLRunner(backend), mmdit(tensor_types) { + mmdit.init(params_ctx, tensor_types, prefix); + } + + std::string get_desc() { + return "mmdit"; + } + + void get_param_tensors(std::map& tensors, const std::string prefix) { + mmdit.get_param_tensors(tensors, prefix); + } + + struct ggml_cgraph* build_graph(struct ggml_tensor* x, + struct ggml_tensor* timesteps, + struct ggml_tensor* context, + struct ggml_tensor* y, + std::vector skip_layers = std::vector()) { + struct ggml_cgraph* gf = 
ggml_new_graph_custom(compute_ctx, MMDIT_GRAPH_SIZE, false); + + x = to_backend(x); + context = to_backend(context); + y = to_backend(y); + timesteps = to_backend(timesteps); + + struct ggml_tensor* out = mmdit.forward(compute_ctx, + x, + timesteps, + y, + context, + skip_layers); + + ggml_build_forward_expand(gf, out); + + return gf; + } + + void compute(int n_threads, + struct ggml_tensor* x, + struct ggml_tensor* timesteps, + struct ggml_tensor* context, + struct ggml_tensor* y, + struct ggml_tensor** output = NULL, + struct ggml_context* output_ctx = NULL, + std::vector skip_layers = std::vector()) { + // x: [N, in_channels, h, w] + // timesteps: [N, ] + // context: [N, max_position, hidden_size]([N, 154, 4096]) or [1, max_position, hidden_size] + // y: [N, adm_in_channels] or [1, adm_in_channels] + auto get_graph = [&]() -> struct ggml_cgraph* { + return build_graph(x, timesteps, context, y, skip_layers); + }; + + GGMLRunner::compute(get_graph, n_threads, false, output, output_ctx); + } + + void test() { + struct ggml_init_params params; + params.mem_size = static_cast(10 * 1024 * 1024); // 10 MB + params.mem_buffer = NULL; + params.no_alloc = false; + + struct ggml_context* work_ctx = ggml_init(params); + GGML_ASSERT(work_ctx != NULL); + + { + // cpu f16: pass + // cpu f32: pass + // cuda f16: pass + // cuda f32: pass + auto x = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, 128, 128, 16, 1); + std::vector timesteps_vec(1, 999.f); + auto timesteps = vector_to_ggml_tensor(work_ctx, timesteps_vec); + ggml_set_f32(x, 0.01f); + // print_ggml_tensor(x); + + auto context = ggml_new_tensor_3d(work_ctx, GGML_TYPE_F32, 4096, 154, 1); + ggml_set_f32(context, 0.01f); + // print_ggml_tensor(context); + + auto y = ggml_new_tensor_2d(work_ctx, GGML_TYPE_F32, 2048, 1); + ggml_set_f32(y, 0.01f); + // print_ggml_tensor(y); + + struct ggml_tensor* out = NULL; + + int t0 = ggml_time_ms(); + compute(8, x, timesteps, context, y, &out, work_ctx); + int t1 = ggml_time_ms(); + + 
print_ggml_tensor(out); + LOG_DEBUG("mmdit test done in %dms", t1 - t0); + } + } + + static void load_from_file_and_test(const std::string& file_path) { + // ggml_backend_t backend = ggml_backend_cuda_init(0); + ggml_backend_t backend = ggml_backend_cpu_init(); + ggml_type model_data_type = GGML_TYPE_F16; + std::shared_ptr mmdit = std::shared_ptr(new MMDiTRunner(backend)); + { + LOG_INFO("loading from '%s'", file_path.c_str()); + + mmdit->alloc_params_buffer(); + std::map tensors; + mmdit->get_param_tensors(tensors, "model.diffusion_model"); + + ModelLoader model_loader; + if (!model_loader.init_from_file(file_path)) { + LOG_ERROR("init model loader from file failed: '%s'", file_path.c_str()); + return; + } + + bool success = model_loader.load_tensors(tensors, backend); + + if (!success) { + LOG_ERROR("load tensors from model loader failed"); + return; + } + + LOG_INFO("mmdit model loaded"); + } + mmdit->test(); + } +}; + +#endif \ No newline at end of file diff --git a/model.cpp b/model.cpp new file mode 100644 index 000000000..559c876c6 --- /dev/null +++ b/model.cpp @@ -0,0 +1,2257 @@ +#include +#include +#include +#include +#include +#include +#include + +#include "model.h" +#include "stable-diffusion.h" +#include "util.h" +#include "vocab.hpp" + +#include "ggml-alloc.h" +#include "ggml-backend.h" +#include "ggml-cpu.h" +#include "ggml.h" + +#include "stable-diffusion.h" + +#ifdef SD_USE_METAL +#include "ggml-metal.h" +#endif + +#ifdef SD_USE_VULKAN +#include "ggml-vulkan.h" +#endif + +#ifdef SD_USE_OPENCL +#include "ggml-opencl.h" +#endif + +#define ST_HEADER_SIZE_LEN 8 + +uint64_t read_u64(uint8_t* buffer) { + // little endian + uint64_t value = 0; + value |= static_cast(buffer[7]) << 56; + value |= static_cast(buffer[6]) << 48; + value |= static_cast(buffer[5]) << 40; + value |= static_cast(buffer[4]) << 32; + value |= static_cast(buffer[3]) << 24; + value |= static_cast(buffer[2]) << 16; + value |= static_cast(buffer[1]) << 8; + value |= 
static_cast(buffer[0]); + return value; +} + +int32_t read_int(uint8_t* buffer) { + // little endian + int value = 0; + value |= buffer[3] << 24; + value |= buffer[2] << 16; + value |= buffer[1] << 8; + value |= buffer[0]; + return value; +} + +uint16_t read_short(uint8_t* buffer) { + // little endian + uint16_t value = 0; + value |= buffer[1] << 8; + value |= buffer[0]; + return value; +} + +/*================================================= Preprocess ==================================================*/ + +std::string self_attn_names[] = { + "self_attn.q_proj.weight", + "self_attn.k_proj.weight", + "self_attn.v_proj.weight", + "self_attn.q_proj.bias", + "self_attn.k_proj.bias", + "self_attn.v_proj.bias", +}; + +const char* unused_tensors[] = { + "betas", + "alphas_cumprod_prev", + "sqrt_alphas_cumprod", + "sqrt_one_minus_alphas_cumprod", + "log_one_minus_alphas_cumprod", + "sqrt_recip_alphas_cumprod", + "sqrt_recipm1_alphas_cumprod", + "posterior_variance", + "posterior_log_variance_clipped", + "posterior_mean_coef1", + "posterior_mean_coef2", + "cond_stage_model.transformer.text_model.embeddings.position_ids", + "cond_stage_model.model.logit_scale", + "cond_stage_model.model.text_projection", + "conditioner.embedders.0.transformer.text_model.embeddings.position_ids", + "conditioner.embedders.0.model.logit_scale", + "conditioner.embedders.1.model.logit_scale", + "model.diffusion_model.time_embedding.cond_proj.weight", + "unet.time_embedding.cond_proj.weight", + "model_ema.decay", + "model_ema.num_updates", + "model_ema.diffusion_model", + "embedding_manager", + "denoiser.sigmas", + "text_encoders.t5xxl.transformer.encoder.embed_tokens.weight", // only used during training +}; + +bool is_unused_tensor(std::string name) { + for (int i = 0; i < sizeof(unused_tensors) / sizeof(const char*); i++) { + if (starts_with(name, unused_tensors[i])) { + return true; + } + } + return false; +} + +std::unordered_map open_clip_to_hf_clip_model = { + {"model.ln_final.bias", 
"transformer.text_model.final_layer_norm.bias"}, + {"model.ln_final.weight", "transformer.text_model.final_layer_norm.weight"}, + {"model.positional_embedding", "transformer.text_model.embeddings.position_embedding.weight"}, + {"model.token_embedding.weight", "transformer.text_model.embeddings.token_embedding.weight"}, + {"model.text_projection", "transformer.text_model.text_projection"}, + {"model.visual.class_embedding", "transformer.vision_model.embeddings.class_embedding"}, + {"model.visual.conv1.weight", "transformer.vision_model.embeddings.patch_embedding.weight"}, + {"model.visual.ln_post.bias", "transformer.vision_model.post_layernorm.bias"}, + {"model.visual.ln_post.weight", "transformer.vision_model.post_layernorm.weight"}, + {"model.visual.ln_pre.bias", "transformer.vision_model.pre_layernorm.bias"}, + {"model.visual.ln_pre.weight", "transformer.vision_model.pre_layernorm.weight"}, + {"model.visual.positional_embedding", "transformer.vision_model.embeddings.position_embedding.weight"}, + {"model.visual.proj", "transformer.visual_projection.weight"}, +}; + +std::unordered_map open_clip_to_hk_clip_resblock = { + {"attn.out_proj.bias", "self_attn.out_proj.bias"}, + {"attn.out_proj.weight", "self_attn.out_proj.weight"}, + {"ln_1.bias", "layer_norm1.bias"}, + {"ln_1.weight", "layer_norm1.weight"}, + {"ln_2.bias", "layer_norm2.bias"}, + {"ln_2.weight", "layer_norm2.weight"}, + {"mlp.c_fc.bias", "mlp.fc1.bias"}, + {"mlp.c_fc.weight", "mlp.fc1.weight"}, + {"mlp.c_proj.bias", "mlp.fc2.bias"}, + {"mlp.c_proj.weight", "mlp.fc2.weight"}, +}; + +std::unordered_map vae_decoder_name_map = { + {"first_stage_model.decoder.mid.attn_1.to_k.bias", "first_stage_model.decoder.mid.attn_1.k.bias"}, + {"first_stage_model.decoder.mid.attn_1.to_k.weight", "first_stage_model.decoder.mid.attn_1.k.weight"}, + {"first_stage_model.decoder.mid.attn_1.to_out.0.bias", "first_stage_model.decoder.mid.attn_1.proj_out.bias"}, + {"first_stage_model.decoder.mid.attn_1.to_out.0.weight", 
"first_stage_model.decoder.mid.attn_1.proj_out.weight"}, + {"first_stage_model.decoder.mid.attn_1.to_q.bias", "first_stage_model.decoder.mid.attn_1.q.bias"}, + {"first_stage_model.decoder.mid.attn_1.to_q.weight", "first_stage_model.decoder.mid.attn_1.q.weight"}, + {"first_stage_model.decoder.mid.attn_1.to_v.bias", "first_stage_model.decoder.mid.attn_1.v.bias"}, + {"first_stage_model.decoder.mid.attn_1.to_v.weight", "first_stage_model.decoder.mid.attn_1.v.weight"}, +}; + +std::unordered_map pmid_v2_name_map = { + {"pmid.qformer_perceiver.perceiver_resampler.layers.0.1.1.weight", + "pmid.qformer_perceiver.perceiver_resampler.layers.0.1.1.fc1.weight"}, + {"pmid.qformer_perceiver.perceiver_resampler.layers.0.1.3.weight", + "pmid.qformer_perceiver.perceiver_resampler.layers.0.1.1.fc2.weight"}, + {"pmid.qformer_perceiver.perceiver_resampler.layers.1.1.1.weight", + "pmid.qformer_perceiver.perceiver_resampler.layers.1.1.1.fc1.weight"}, + {"pmid.qformer_perceiver.perceiver_resampler.layers.1.1.3.weight", + "pmid.qformer_perceiver.perceiver_resampler.layers.1.1.1.fc2.weight"}, + {"pmid.qformer_perceiver.perceiver_resampler.layers.2.1.1.weight", + "pmid.qformer_perceiver.perceiver_resampler.layers.2.1.1.fc1.weight"}, + {"pmid.qformer_perceiver.perceiver_resampler.layers.2.1.3.weight", + "pmid.qformer_perceiver.perceiver_resampler.layers.2.1.1.fc2.weight"}, + {"pmid.qformer_perceiver.perceiver_resampler.layers.3.1.1.weight", + "pmid.qformer_perceiver.perceiver_resampler.layers.3.1.1.fc1.weight"}, + {"pmid.qformer_perceiver.perceiver_resampler.layers.3.1.3.weight", + "pmid.qformer_perceiver.perceiver_resampler.layers.3.1.1.fc2.weight"}, + {"pmid.qformer_perceiver.token_proj.0.bias", + "pmid.qformer_perceiver.token_proj.fc1.bias"}, + {"pmid.qformer_perceiver.token_proj.2.bias", + "pmid.qformer_perceiver.token_proj.fc2.bias"}, + {"pmid.qformer_perceiver.token_proj.0.weight", + "pmid.qformer_perceiver.token_proj.fc1.weight"}, + {"pmid.qformer_perceiver.token_proj.2.weight", + 
"pmid.qformer_perceiver.token_proj.fc2.weight"}, +}; + +std::string convert_open_clip_to_hf_clip(const std::string& name) { + std::string new_name = name; + std::string prefix; + if (contains(new_name, ".enc.")) { + // llama.cpp naming convention for T5 + size_t pos = new_name.find(".enc."); + if (pos != std::string::npos) { + new_name.replace(pos, 5, ".encoder."); + } + pos = new_name.find("blk."); + if (pos != std::string::npos) { + new_name.replace(pos, 4, "block."); + } + pos = new_name.find("output_norm."); + if (pos != std::string::npos) { + new_name.replace(pos, 12, "final_layer_norm."); + } + pos = new_name.find("attn_k."); + if (pos != std::string::npos) { + new_name.replace(pos, 7, "layer.0.SelfAttention.k."); + } + pos = new_name.find("attn_v."); + if (pos != std::string::npos) { + new_name.replace(pos, 7, "layer.0.SelfAttention.v."); + } + pos = new_name.find("attn_o."); + if (pos != std::string::npos) { + new_name.replace(pos, 7, "layer.0.SelfAttention.o."); + } + pos = new_name.find("attn_q."); + if (pos != std::string::npos) { + new_name.replace(pos, 7, "layer.0.SelfAttention.q."); + } + pos = new_name.find("attn_norm."); + if (pos != std::string::npos) { + new_name.replace(pos, 10, "layer.0.layer_norm."); + } + pos = new_name.find("ffn_norm."); + if (pos != std::string::npos) { + new_name.replace(pos, 9, "layer.1.layer_norm."); + } + pos = new_name.find("ffn_up."); + if (pos != std::string::npos) { + new_name.replace(pos, 7, "layer.1.DenseReluDense.wi_1."); + } + pos = new_name.find("ffn_down."); + if (pos != std::string::npos) { + new_name.replace(pos, 9, "layer.1.DenseReluDense.wo."); + } + pos = new_name.find("ffn_gate."); + if (pos != std::string::npos) { + new_name.replace(pos, 9, "layer.1.DenseReluDense.wi_0."); + } + pos = new_name.find("attn_rel_b."); + if (pos != std::string::npos) { + new_name.replace(pos, 11, "layer.0.SelfAttention.relative_attention_bias."); + } + } else if (name == "text_encoders.t5xxl.transformer.token_embd.weight") { 
+ new_name = "text_encoders.t5xxl.transformer.shared.weight"; + } + + if (starts_with(new_name, "conditioner.embedders.0.open_clip.")) { + prefix = "cond_stage_model."; + new_name = new_name.substr(strlen("conditioner.embedders.0.open_clip.")); + } else if (starts_with(new_name, "conditioner.embedders.0.")) { + prefix = "cond_stage_model."; + new_name = new_name.substr(strlen("conditioner.embedders.0.")); + } else if (starts_with(new_name, "conditioner.embedders.1.")) { + prefix = "cond_stage_model.1."; + new_name = new_name.substr(strlen("conditioner.embedders.0.")); + } else if (starts_with(new_name, "cond_stage_model.")) { + prefix = "cond_stage_model."; + new_name = new_name.substr(strlen("cond_stage_model.")); + } else if (ends_with(new_name, "vision_model.visual_projection.weight")) { + prefix = new_name.substr(0, new_name.size() - strlen("vision_model.visual_projection.weight")); + new_name = prefix + "visual_projection.weight"; + return new_name; + } else if (ends_with(new_name, "transformer.text_projection.weight")) { + prefix = new_name.substr(0, new_name.size() - strlen("transformer.text_projection.weight")); + new_name = prefix + "transformer.text_model.text_projection"; + return new_name; + } else { + return new_name; + } + + if (open_clip_to_hf_clip_model.find(new_name) != open_clip_to_hf_clip_model.end()) { + new_name = open_clip_to_hf_clip_model[new_name]; + } + + std::string open_clip_resblock_prefix = "model.transformer.resblocks."; + std::string hf_clip_resblock_prefix = "transformer.text_model.encoder.layers."; + + auto replace_suffix = [&]() { + if (new_name.find(open_clip_resblock_prefix) == 0) { + std::string remain = new_name.substr(open_clip_resblock_prefix.length()); + std::string idx = remain.substr(0, remain.find(".")); + std::string suffix = remain.substr(idx.length() + 1); + + if (suffix == "attn.in_proj_weight" || suffix == "attn.in_proj_bias") { + new_name = hf_clip_resblock_prefix + idx + "." 
+ suffix; + } else if (open_clip_to_hk_clip_resblock.find(suffix) != open_clip_to_hk_clip_resblock.end()) { + std::string new_suffix = open_clip_to_hk_clip_resblock[suffix]; + new_name = hf_clip_resblock_prefix + idx + "." + new_suffix; + } + } + }; + + replace_suffix(); + + open_clip_resblock_prefix = "model.visual.transformer.resblocks."; + hf_clip_resblock_prefix = "transformer.vision_model.encoder.layers."; + + replace_suffix(); + + return prefix + new_name; +} + +std::string convert_vae_decoder_name(const std::string& name) { + if (vae_decoder_name_map.find(name) != vae_decoder_name_map.end()) { + return vae_decoder_name_map[name]; + } + return name; +} + +std::string convert_pmid_v2_name(const std::string& name) { + if (pmid_v2_name_map.find(name) != pmid_v2_name_map.end()) { + return pmid_v2_name_map[name]; + } + return name; +} + +/* If not a SDXL LoRA the unet" prefix will have already been replaced by this + * point and "te2" and "te1" don't seem to appear in non-SDXL only "te_" */ +std::string convert_sdxl_lora_name(std::string tensor_name) { + const std::pair sdxl_lora_name_lookup[] = { + {"unet", "model_diffusion_model"}, + {"te2", "cond_stage_model_1_transformer"}, + {"te1", "cond_stage_model_transformer"}, + {"text_encoder_2", "cond_stage_model_1_transformer"}, + {"text_encoder", "cond_stage_model_transformer"}, + }; + for (auto& pair_i : sdxl_lora_name_lookup) { + if (tensor_name.compare(0, pair_i.first.length(), pair_i.first) == 0) { + tensor_name = std::regex_replace(tensor_name, std::regex(pair_i.first), pair_i.second); + break; + } + } + return tensor_name; +} + +std::unordered_map> suffix_conversion_underline = { + { + "attentions", + { + {"to_k", "k"}, + {"to_q", "q"}, + {"to_v", "v"}, + {"to_out_0", "proj_out"}, + {"group_norm", "norm"}, + {"key", "k"}, + {"query", "q"}, + {"value", "v"}, + {"proj_attn", "proj_out"}, + }, + }, + { + "resnets", + { + {"conv1", "in_layers_2"}, + {"conv2", "out_layers_3"}, + {"norm1", "in_layers_0"}, + {"norm2", 
"out_layers_0"}, + {"time_emb_proj", "emb_layers_1"}, + {"conv_shortcut", "skip_connection"}, + }, + }, +}; + +std::unordered_map> suffix_conversion_dot = { + { + "attentions", + { + {"to_k", "k"}, + {"to_q", "q"}, + {"to_v", "v"}, + {"to_out.0", "proj_out"}, + {"group_norm", "norm"}, + {"key", "k"}, + {"query", "q"}, + {"value", "v"}, + {"proj_attn", "proj_out"}, + }, + }, + { + "resnets", + { + {"conv1", "in_layers.2"}, + {"conv2", "out_layers.3"}, + {"norm1", "in_layers.0"}, + {"norm2", "out_layers.0"}, + {"time_emb_proj", "emb_layers.1"}, + {"conv_shortcut", "skip_connection"}, + }, + }, +}; + +std::string convert_diffusers_name_to_compvis(std::string key, char seq) { + std::vector m; + + auto match = [](std::vector& match_list, const std::regex& regex, const std::string& key) { + auto r = std::smatch{}; + if (!std::regex_match(key, r, regex)) { + return false; + } + + match_list.clear(); + for (size_t i = 1; i < r.size(); ++i) { + match_list.push_back(r.str(i)); + } + return true; + }; + + std::unordered_map> suffix_conversion; + if (seq == '_') { + suffix_conversion = suffix_conversion_underline; + } else { + suffix_conversion = suffix_conversion_dot; + } + + auto get_converted_suffix = [&suffix_conversion](const std::string& outer_key, const std::string& inner_key) { + auto outer_iter = suffix_conversion.find(outer_key); + if (outer_iter != suffix_conversion.end()) { + auto inner_iter = outer_iter->second.find(inner_key); + if (inner_iter != outer_iter->second.end()) { + return inner_iter->second; + } + } + return inner_key; + }; + + // convert attn to out + if (ends_with(key, "to_out")) { + key += format("%c0", seq); + } + + // unet + if (match(m, std::regex(format("unet%cconv_in(.*)", seq)), key)) { + return format("model%cdiffusion_model%cinput_blocks%c0%c0", seq, seq, seq, seq) + m[0]; + } + + if (match(m, std::regex(format("unet%cconv%cout(.*)", seq, seq)), key)) { + return format("model%cdiffusion_model%cout%c2", seq, seq, seq) + m[0]; + } + + if 
(match(m, std::regex(format("unet%cconv_norm_out(.*)", seq)), key)) { + return format("model%cdiffusion_model%cout%c0", seq, seq, seq) + m[0]; + } + + if (match(m, std::regex(format("unet%ctime_embedding%clinear_(\\d+)(.*)", seq, seq)), key)) { + return format("model%cdiffusion_model%ctime_embed%c", seq, seq, seq) + std::to_string(std::stoi(m[0]) * 2 - 2) + m[1]; + } + + if (match(m, std::regex(format("unet%cadd_embedding%clinear_(\\d+)(.*)", seq, seq)), key)) { + return format("model%cdiffusion_model%clabel_emb%c0%c", seq, seq, seq, seq) + std::to_string(std::stoi(m[0]) * 2 - 2) + m[1]; + } + + if (match(m, std::regex(format("unet%cdown_blocks%c(\\d+)%c(attentions|resnets)%c(\\d+)%c(.+)", seq, seq, seq, seq, seq)), key)) { + std::string suffix = get_converted_suffix(m[1], m[3]); + // LOG_DEBUG("%s %s %s %s", m[0].c_str(), m[1].c_str(), m[2].c_str(), m[3].c_str()); + return format("model%cdiffusion_model%cinput_blocks%c", seq, seq, seq) + std::to_string(1 + std::stoi(m[0]) * 3 + std::stoi(m[2])) + seq + + (m[1] == "attentions" ? "1" : "0") + seq + suffix; + } + + if (match(m, std::regex(format("unet%cmid_block%c(attentions|resnets)%c(\\d+)%c(.+)", seq, seq, seq, seq)), key)) { + std::string suffix = get_converted_suffix(m[0], m[2]); + return format("model%cdiffusion_model%cmiddle_block%c", seq, seq, seq) + (m[0] == "attentions" ? "1" : std::to_string(std::stoi(m[1]) * 2)) + + seq + suffix; + } + + if (match(m, std::regex(format("unet%cup_blocks%c(\\d+)%c(attentions|resnets)%c(\\d+)%c(.+)", seq, seq, seq, seq, seq)), key)) { + std::string suffix = get_converted_suffix(m[1], m[3]); + return format("model%cdiffusion_model%coutput_blocks%c", seq, seq, seq) + std::to_string(std::stoi(m[0]) * 3 + std::stoi(m[2])) + seq + + (m[1] == "attentions" ? 
"1" : "0") + seq + suffix; + } + + if (match(m, std::regex(format("unet%cdown_blocks%c(\\d+)%cdownsamplers%c0%cconv", seq, seq, seq, seq, seq)), key)) { + return format("model%cdiffusion_model%cinput_blocks%c", seq, seq, seq) + std::to_string(3 + std::stoi(m[0]) * 3) + seq + "0" + seq + "op"; + } + + if (match(m, std::regex(format("unet%cup_blocks%c(\\d+)%cupsamplers%c0%cconv", seq, seq, seq, seq, seq)), key)) { + return format("model%cdiffusion_model%coutput_blocks%c", seq, seq, seq) + std::to_string(2 + std::stoi(m[0]) * 3) + seq + + (std::stoi(m[0]) > 0 ? "2" : "1") + seq + "conv"; + } + + // clip + if (match(m, std::regex(format("te%ctext_model%cencoder%clayers%c(\\d+)%c(.+)", seq, seq, seq, seq, seq)), key)) { + return format("cond_stage_model%ctransformer%ctext_model%cencoder%clayers%c", seq, seq, seq, seq, seq) + m[0] + seq + m[1]; + } + + if (match(m, std::regex(format("te%ctext_model(.*)", seq)), key)) { + return format("cond_stage_model%ctransformer%ctext_model", seq, seq) + m[0]; + } + + // clip-g + if (match(m, std::regex(format("te%c1%ctext_model%cencoder%clayers%c(\\d+)%c(.+)", seq, seq, seq, seq, seq, seq)), key)) { + return format("cond_stage_model%c1%ctransformer%ctext_model%cencoder%clayers%c", seq, seq, seq, seq, seq, seq) + m[0] + seq + m[1]; + } + + if (match(m, std::regex(format("te%c1%ctext_model(.*)", seq, seq)), key)) { + return format("cond_stage_model%c1%ctransformer%ctext_model", seq, seq, seq) + m[0]; + } + + if (match(m, std::regex(format("te%c1%ctext_projection", seq, seq)), key)) { + return format("cond_stage_model%c1%ctransformer%ctext_model%ctext_projection", seq, seq, seq, seq); + } + + // vae + if (match(m, std::regex(format("vae%c(.*)%cconv_norm_out(.*)", seq, seq)), key)) { + return format("first_stage_model%c%s%cnorm_out%s", seq, m[0].c_str(), seq, m[1].c_str()); + } + + if (match(m, std::regex(format("vae%c(.*)%cmid_block%c(attentions|resnets)%c(\\d+)%c(.+)", seq, seq, seq, seq, seq)), key)) { + std::string suffix; + 
std::string block_name; + if (m[1] == "attentions") { + block_name = "attn"; + suffix = get_converted_suffix(m[1], m[3]); + } else { + block_name = "block"; + suffix = m[3]; + } + return format("first_stage_model%c%s%cmid%c%s_%d%c%s", + seq, m[0].c_str(), seq, seq, block_name.c_str(), std::stoi(m[2]) + 1, seq, suffix.c_str()); + } + + if (match(m, std::regex(format("vae%c(.*)%cup_blocks%c(\\d+)%cresnets%c(\\d+)%c(.+)", seq, seq, seq, seq, seq, seq)), key)) { + std::string suffix = m[3]; + if (suffix == "conv_shortcut") { + suffix = "nin_shortcut"; + } + return format("first_stage_model%c%s%cup%c%d%cblock%c%s%c%s", + seq, m[0].c_str(), seq, seq, 3 - std::stoi(m[1]), seq, seq, m[2].c_str(), seq, suffix.c_str()); + } + + if (match(m, std::regex(format("vae%c(.*)%cdown_blocks%c(\\d+)%cdownsamplers%c0%cconv", seq, seq, seq, seq, seq, seq)), key)) { + return format("first_stage_model%c%s%cdown%c%d%cdownsample%cconv", + seq, m[0].c_str(), seq, seq, std::stoi(m[1]), seq, seq); + } + + if (match(m, std::regex(format("vae%c(.*)%cdown_blocks%c(\\d+)%cresnets%c(\\d+)%c(.+)", seq, seq, seq, seq, seq, seq)), key)) { + std::string suffix = m[3]; + if (suffix == "conv_shortcut") { + suffix = "nin_shortcut"; + } + return format("first_stage_model%c%s%cdown%c%d%cblock%c%s%c%s", + seq, m[0].c_str(), seq, seq, std::stoi(m[1]), seq, seq, m[2].c_str(), seq, suffix.c_str()); + } + + if (match(m, std::regex(format("vae%c(.*)%cup_blocks%c(\\d+)%cupsamplers%c0%cconv", seq, seq, seq, seq, seq, seq)), key)) { + return format("first_stage_model%c%s%cup%c%d%cupsample%cconv", + seq, m[0].c_str(), seq, seq, 3 - std::stoi(m[1]), seq, seq); + } + + if (match(m, std::regex(format("vae%c(.*)", seq)), key)) { + return format("first_stage_model%c", seq) + m[0]; + } + + return key; +} + +std::string convert_tensor_name(std::string name) { + if (starts_with(name, "diffusion_model")) { + name = "model." 
+ name; + } + // size_t pos = name.find("lora_A"); + // if (pos != std::string::npos) { + // name.replace(pos, strlen("lora_A"), "lora_up"); + // } + // pos = name.find("lora_B"); + // if (pos != std::string::npos) { + // name.replace(pos, strlen("lora_B"), "lora_down"); + // } + std::string new_name = name; + if (starts_with(name, "cond_stage_model.") || starts_with(name, "conditioner.embedders.") || starts_with(name, "text_encoders.") || ends_with(name, ".vision_model.visual_projection.weight")) { + new_name = convert_open_clip_to_hf_clip(name); + } else if (starts_with(name, "first_stage_model.decoder")) { + new_name = convert_vae_decoder_name(name); + } else if (starts_with(name, "pmid.qformer_perceiver")) { + new_name = convert_pmid_v2_name(name); + } else if (starts_with(name, "control_model.")) { // for controlnet pth models + size_t pos = name.find('.'); + if (pos != std::string::npos) { + new_name = name.substr(pos + 1); + } + } else if (starts_with(name, "lora_")) { // for lora + size_t pos = name.find('.'); + if (pos != std::string::npos) { + std::string name_without_network_parts = name.substr(5, pos - 5); + std::string network_part = name.substr(pos + 1); + + // LOG_DEBUG("%s %s", name_without_network_parts.c_str(), network_part.c_str()); + std::string new_key = convert_diffusers_name_to_compvis(name_without_network_parts, '_'); + /* For dealing with the new SDXL LoRA tensor naming convention */ + new_key = convert_sdxl_lora_name(new_key); + + if (new_key.empty()) { + new_name = name; + } else { + new_name = "lora." + new_key + "." 
+ network_part; + } + } else { + new_name = name; + } + } else if (contains(name, "lora_up") || contains(name, "lora_down") || + contains(name, "lora.up") || contains(name, "lora.down") || + contains(name, "lora_linear")) { + size_t pos = new_name.find(".processor"); + if (pos != std::string::npos) { + new_name.replace(pos, strlen(".processor"), ""); + } + // if (starts_with(new_name, "transformer.transformer_blocks") || starts_with(new_name, "transformer.single_transformer_blocks")) { + // new_name = "model.diffusion_model." + new_name; + // } + pos = new_name.rfind("lora"); + if (pos != std::string::npos) { + std::string name_without_network_parts = new_name.substr(0, pos - 1); + std::string network_part = new_name.substr(pos); + // LOG_DEBUG("%s %s", name_without_network_parts.c_str(), network_part.c_str()); + std::string new_key = convert_diffusers_name_to_compvis(name_without_network_parts, '.'); + new_key = convert_sdxl_lora_name(new_key); + replace_all_chars(new_key, '.', '_'); + size_t npos = network_part.rfind("_linear_layer"); + if (npos != std::string::npos) { + network_part.replace(npos, strlen("_linear_layer"), ""); + } + if (starts_with(network_part, "lora.")) { + network_part = "lora_" + network_part.substr(5); + } + if (new_key.size() > 0) { + new_name = "lora." + new_key + "." 
+ network_part; + } + // LOG_DEBUG("new name: %s", new_name.c_str()); + } + } else if (starts_with(name, "unet") || starts_with(name, "vae") || starts_with(name, "te")) { // for diffuser + size_t pos = name.find_last_of('.'); + if (pos != std::string::npos) { + std::string name_without_network_parts = name.substr(0, pos); + std::string network_part = name.substr(pos + 1); + // LOG_DEBUG("%s %s", name_without_network_parts.c_str(), network_part.c_str()); + std::string new_key = convert_diffusers_name_to_compvis(name_without_network_parts, '.'); + if (new_key.empty()) { + new_name = name; + } else if (new_key == "cond_stage_model.1.transformer.text_model.text_projection") { + new_name = new_key; + } else { + new_name = new_key + "." + network_part; + } + } else { + new_name = name; + } + } else { + new_name = name; + } + // if (new_name != name) { + // LOG_DEBUG("%s => %s", name.c_str(), new_name.c_str()); + // } + return new_name; +} + +void add_preprocess_tensor_storage_types(std::map& tensor_storages_types, std::string name, enum ggml_type type) { + std::string new_name = convert_tensor_name(name); + + if (new_name.find("cond_stage_model") != std::string::npos && ends_with(new_name, "attn.in_proj_weight")) { + size_t prefix_size = new_name.find("attn.in_proj_weight"); + std::string prefix = new_name.substr(0, prefix_size); + tensor_storages_types[prefix + "self_attn.q_proj.weight"] = type; + tensor_storages_types[prefix + "self_attn.k_proj.weight"] = type; + tensor_storages_types[prefix + "self_attn.v_proj.weight"] = type; + } else if (new_name.find("cond_stage_model") != std::string::npos && ends_with(new_name, "attn.in_proj_bias")) { + size_t prefix_size = new_name.find("attn.in_proj_bias"); + std::string prefix = new_name.substr(0, prefix_size); + tensor_storages_types[prefix + "self_attn.q_proj.bias"] = type; + tensor_storages_types[prefix + "self_attn.k_proj.bias"] = type; + tensor_storages_types[prefix + "self_attn.v_proj.bias"] = type; + } else { + 
tensor_storages_types[new_name] = type; + } +} + +void preprocess_tensor(TensorStorage tensor_storage, + std::vector& processed_tensor_storages) { + std::vector result; + std::string new_name = convert_tensor_name(tensor_storage.name); + + // convert unet transformer linear to conv2d 1x1 + if (starts_with(new_name, "model.diffusion_model.") && + (ends_with(new_name, "proj_in.weight") || ends_with(new_name, "proj_out.weight"))) { + tensor_storage.unsqueeze(); + } + + // convert vae attn block linear to conv2d 1x1 + if (starts_with(new_name, "first_stage_model.") && new_name.find("attn_1") != std::string::npos) { + tensor_storage.unsqueeze(); + } + + tensor_storage.name = new_name; + + if (new_name.find("cond_stage_model") != std::string::npos && + ends_with(new_name, "attn.in_proj_weight")) { + size_t prefix_size = new_name.find("attn.in_proj_weight"); + std::string prefix = new_name.substr(0, prefix_size); + + std::vector chunks = tensor_storage.chunk(3); + chunks[0].name = prefix + "self_attn.q_proj.weight"; + chunks[1].name = prefix + "self_attn.k_proj.weight"; + chunks[2].name = prefix + "self_attn.v_proj.weight"; + + processed_tensor_storages.insert(processed_tensor_storages.end(), chunks.begin(), chunks.end()); + + } else if (new_name.find("cond_stage_model") != std::string::npos && + ends_with(new_name, "attn.in_proj_bias")) { + size_t prefix_size = new_name.find("attn.in_proj_bias"); + std::string prefix = new_name.substr(0, prefix_size); + + std::vector chunks = tensor_storage.chunk(3); + chunks[0].name = prefix + "self_attn.q_proj.bias"; + chunks[1].name = prefix + "self_attn.k_proj.bias"; + chunks[2].name = prefix + "self_attn.v_proj.bias"; + + processed_tensor_storages.insert(processed_tensor_storages.end(), chunks.begin(), chunks.end()); + } else { + processed_tensor_storages.push_back(tensor_storage); + } +} + +float bf16_to_f32(uint16_t bfloat16) { + uint32_t val_bits = (static_cast(bfloat16) << 16); + return *reinterpret_cast(&val_bits); +} + 
+uint16_t f8_e4m3_to_f16(uint8_t f8) { + // do we need to support uz? + + const uint32_t exponent_bias = 7; + if (f8 == 0xff) { + return ggml_fp32_to_fp16(-NAN); + } else if (f8 == 0x7f) { + return ggml_fp32_to_fp16(NAN); + } + + uint32_t sign = f8 & 0x80; + uint32_t exponent = (f8 & 0x78) >> 3; + uint32_t mantissa = f8 & 0x07; + uint32_t result = sign << 24; + if (exponent == 0) { + if (mantissa > 0) { + exponent = 0x7f - exponent_bias; + + // yes, 2 times + if ((mantissa & 0x04) == 0) { + mantissa &= 0x03; + mantissa <<= 1; + exponent -= 1; + } + if ((mantissa & 0x04) == 0) { + mantissa &= 0x03; + mantissa <<= 1; + exponent -= 1; + } + + result |= (mantissa & 0x03) << 21; + result |= exponent << 23; + } + } else { + result |= mantissa << 20; + exponent += 0x7f - exponent_bias; + result |= exponent << 23; + } + + return ggml_fp32_to_fp16(*reinterpret_cast(&result)); +} + +uint16_t f8_e5m2_to_f16(uint8_t fp8) { + uint8_t sign = (fp8 >> 7) & 0x1; + uint8_t exponent = (fp8 >> 2) & 0x1F; + uint8_t mantissa = fp8 & 0x3; + + uint16_t fp16_sign = sign << 15; + uint16_t fp16_exponent; + uint16_t fp16_mantissa; + + if (exponent == 0 && mantissa == 0) { // zero + return fp16_sign; + } + + if (exponent == 0x1F) { // NAN and INF + fp16_exponent = 0x1F; + fp16_mantissa = mantissa ? 
(mantissa << 8) : 0; + return fp16_sign | (fp16_exponent << 10) | fp16_mantissa; + } + + if (exponent == 0) { // subnormal numbers + fp16_exponent = 0; + fp16_mantissa = (mantissa << 8); + return fp16_sign | fp16_mantissa; + } + + // normal numbers + int16_t true_exponent = (int16_t)exponent - 15 + 15; + if (true_exponent <= 0) { + fp16_exponent = 0; + fp16_mantissa = (mantissa << 8); + } else if (true_exponent >= 0x1F) { + fp16_exponent = 0x1F; + fp16_mantissa = 0; + } else { + fp16_exponent = (uint16_t)true_exponent; + fp16_mantissa = mantissa << 8; + } + + return fp16_sign | (fp16_exponent << 10) | fp16_mantissa; +} + +void bf16_to_f32_vec(uint16_t* src, float* dst, int64_t n) { + // support inplace op + for (int64_t i = n - 1; i >= 0; i--) { + dst[i] = bf16_to_f32(src[i]); + } +} + +void f8_e4m3_to_f16_vec(uint8_t* src, uint16_t* dst, int64_t n) { + // support inplace op + for (int64_t i = n - 1; i >= 0; i--) { + dst[i] = f8_e4m3_to_f16(src[i]); + } +} +void f8_e5m2_to_f16_vec(uint8_t* src, uint16_t* dst, int64_t n) { + // support inplace op + for (int64_t i = n - 1; i >= 0; i--) { + dst[i] = f8_e5m2_to_f16(src[i]); + } +} + +void convert_tensor(void* src, + ggml_type src_type, + void* dst, + ggml_type dst_type, + int nrows, + int n_per_row) { + int n = nrows * n_per_row; + if (src_type == dst_type) { + size_t nbytes = n * ggml_type_size(src_type) / ggml_blck_size(src_type); + memcpy(((char*)dst), ((char*)src), nbytes); + } else if (src_type == GGML_TYPE_F32) { + if (dst_type == GGML_TYPE_F16) { + ggml_fp32_to_fp16_row((float*)src, (ggml_fp16_t*)dst, n); + } else { + std::vector imatrix(n_per_row, 1.0f); // dummy importance matrix + const float* im = imatrix.data(); + ggml_quantize_chunk(dst_type, (float*)src, dst, 0, nrows, n_per_row, im); + } + } else if (dst_type == GGML_TYPE_F32) { + if (src_type == GGML_TYPE_F16) { + ggml_fp16_to_fp32_row((ggml_fp16_t*)src, (float*)dst, n); + } else { + auto qtype = ggml_get_type_traits(src_type); + if (qtype->to_float == 
NULL) { + throw std::runtime_error(format("type %s unsupported for integer quantization: no dequantization available", + ggml_type_name(src_type))); + } + qtype->to_float(src, (float*)dst, n); + } + } else { + // src_type == GGML_TYPE_F16 => dst_type is quantized + // src_type is quantized => dst_type == GGML_TYPE_F16 or dst_type is quantized + auto qtype = ggml_get_type_traits(src_type); + if (qtype->to_float == NULL) { + throw std::runtime_error(format("type %s unsupported for integer quantization: no dequantization available", + ggml_type_name(src_type))); + } + std::vector buf; + buf.resize(sizeof(float) * n); + char* src_data_f32 = buf.data(); + qtype->to_float(src, (float*)src_data_f32, n); + if (dst_type == GGML_TYPE_F16) { + ggml_fp32_to_fp16_row((float*)src_data_f32, (ggml_fp16_t*)dst, n); + } else { + std::vector imatrix(n_per_row, 1.0f); // dummy importance matrix + const float* im = imatrix.data(); + ggml_quantize_chunk(dst_type, (float*)src_data_f32, dst, 0, nrows, n_per_row, im); + } + } +} + +/*================================================= ModelLoader ==================================================*/ + +// ported from https://github.com/openai/CLIP/blob/main/clip/simple_tokenizer.py#L16 +std::map unicode_to_byte() { + std::map byte_to_unicode; + + // List of utf-8 byte ranges + for (int b = static_cast('!'); b <= static_cast('~'); ++b) { + byte_to_unicode[b] = static_cast(b); + } + + for (int b = 49825; b <= 49836; ++b) { + byte_to_unicode[b] = static_cast(b); + } + + for (int b = 49838; b <= 50111; ++b) { + byte_to_unicode[b] = static_cast(b); + } + // printf("%d %d %d %d\n", static_cast('Ā”'), static_cast('¬'), static_cast('Ā®'), static_cast('Ćæ')); + // exit(1); + + int n = 0; + for (int b = 0; b < 256; ++b) { + if (byte_to_unicode.find(b) == byte_to_unicode.end()) { + byte_to_unicode[b] = static_cast(256 + n); + n++; + } + } + + // byte_encoder = bytes_to_unicode() + // byte_decoder = {v: k for k, v in byte_encoder.items()} + std::map 
byte_decoder; + + for (const auto& entry : byte_to_unicode) { + byte_decoder[entry.second] = entry.first; + } + + byte_to_unicode.clear(); + + return byte_decoder; +} + +bool is_zip_file(const std::string& file_path) { + struct zip_t* zip = zip_open(file_path.c_str(), 0, 'r'); + if (zip == NULL) { + return false; + } + zip_close(zip); + return true; +} + +bool is_gguf_file(const std::string& file_path) { + std::ifstream file(file_path, std::ios::binary); + if (!file.is_open()) { + return false; + } + + char magic[4]; + + file.read(magic, sizeof(magic)); + if (!file) { + return false; + } + for (uint32_t i = 0; i < sizeof(magic); i++) { + if (magic[i] != GGUF_MAGIC[i]) { + return false; + } + } + + return true; +} + +bool is_safetensors_file(const std::string& file_path) { + std::ifstream file(file_path, std::ios::binary); + if (!file.is_open()) { + return false; + } + + // get file size + file.seekg(0, file.end); + size_t file_size_ = file.tellg(); + file.seekg(0, file.beg); + + // read header size + if (file_size_ <= ST_HEADER_SIZE_LEN) { + return false; + } + + uint8_t header_size_buf[ST_HEADER_SIZE_LEN]; + file.read((char*)header_size_buf, ST_HEADER_SIZE_LEN); + if (!file) { + return false; + } + + size_t header_size_ = read_u64(header_size_buf); + if (header_size_ >= file_size_ || header_size_ <= 2) { + return false; + } + + // read header + std::vector header_buf; + header_buf.resize(header_size_ + 1); + header_buf[header_size_] = '\0'; + file.read(header_buf.data(), header_size_); + if (!file) { + return false; + } + nlohmann::json header_ = nlohmann::json::parse(header_buf.data()); + if (header_.is_discarded()) { + return false; + } + return true; +} + +bool ModelLoader::init_from_file(const std::string& file_path, const std::string& prefix) { + if (is_directory(file_path)) { + LOG_INFO("load %s using diffusers format", file_path.c_str()); + return init_from_diffusers_file(file_path, prefix); + } else if (is_gguf_file(file_path)) { + LOG_INFO("load %s using 
gguf format", file_path.c_str()); + return init_from_gguf_file(file_path, prefix); + } else if (is_safetensors_file(file_path)) { + LOG_INFO("load %s using safetensors format", file_path.c_str()); + return init_from_safetensors_file(file_path, prefix); + } else if (is_zip_file(file_path)) { + LOG_INFO("load %s using checkpoint format", file_path.c_str()); + return init_from_ckpt_file(file_path, prefix); + } else { + LOG_WARN("unknown format %s", file_path.c_str()); + return false; + } +} + +/*================================================= GGUFModelLoader ==================================================*/ + +bool ModelLoader::init_from_gguf_file(const std::string& file_path, const std::string& prefix) { + LOG_DEBUG("init from '%s'", file_path.c_str()); + file_paths_.push_back(file_path); + size_t file_index = file_paths_.size() - 1; + + gguf_context* ctx_gguf_ = NULL; + ggml_context* ctx_meta_ = NULL; + ctx_gguf_ = gguf_init_from_file(file_path.c_str(), {true, &ctx_meta_}); + if (!ctx_gguf_) { + LOG_ERROR("failed to open '%s'", file_path.c_str()); + return false; + } + + int n_tensors = gguf_get_n_tensors(ctx_gguf_); + + size_t total_size = 0; + size_t data_offset = gguf_get_data_offset(ctx_gguf_); + for (int i = 0; i < n_tensors; i++) { + std::string name = gguf_get_tensor_name(ctx_gguf_, i); + struct ggml_tensor* dummy = ggml_get_tensor(ctx_meta_, name.c_str()); + size_t offset = data_offset + gguf_get_tensor_offset(ctx_gguf_, i); + + // LOG_DEBUG("%s", name.c_str()); + + TensorStorage tensor_storage(prefix + name, dummy->type, dummy->ne, ggml_n_dims(dummy), file_index, offset); + + GGML_ASSERT(ggml_nbytes(dummy) == tensor_storage.nbytes()); + + tensor_storages.push_back(tensor_storage); + add_preprocess_tensor_storage_types(tensor_storages_types, tensor_storage.name, tensor_storage.type); + } + + gguf_free(ctx_gguf_); + ggml_free(ctx_meta_); + + return true; +} + +/*================================================= SafeTensorsModelLoader 
==================================================*/ + +ggml_type str_to_ggml_type(const std::string& dtype) { + ggml_type ttype = GGML_TYPE_COUNT; + if (dtype == "F16") { + ttype = GGML_TYPE_F16; + } else if (dtype == "BF16") { + ttype = GGML_TYPE_F32; + } else if (dtype == "F32") { + ttype = GGML_TYPE_F32; + } else if (dtype == "F64") { + ttype = GGML_TYPE_F64; + } else if (dtype == "F8_E4M3") { + ttype = GGML_TYPE_F16; + } else if (dtype == "F8_E5M2") { + ttype = GGML_TYPE_F16; + } else if (dtype == "I64") { + ttype = GGML_TYPE_I64; + } + return ttype; +} + +// https://huggingface.co/docs/safetensors/index +bool ModelLoader::init_from_safetensors_file(const std::string& file_path, const std::string& prefix) { + LOG_DEBUG("init from '%s'", file_path.c_str()); + file_paths_.push_back(file_path); + size_t file_index = file_paths_.size() - 1; + std::ifstream file(file_path, std::ios::binary); + if (!file.is_open()) { + LOG_ERROR("failed to open '%s'", file_path.c_str()); + file_paths_.pop_back(); + return false; + } + + // get file size + file.seekg(0, file.end); + size_t file_size_ = file.tellg(); + file.seekg(0, file.beg); + + // read header size + if (file_size_ <= ST_HEADER_SIZE_LEN) { + LOG_ERROR("invalid safetensor file '%s'", file_path.c_str()); + file_paths_.pop_back(); + return false; + } + + uint8_t header_size_buf[ST_HEADER_SIZE_LEN]; + file.read((char*)header_size_buf, ST_HEADER_SIZE_LEN); + if (!file) { + LOG_ERROR("read safetensors header size failed: '%s'", file_path.c_str()); + return false; + } + + size_t header_size_ = read_u64(header_size_buf); + if (header_size_ >= file_size_) { + LOG_ERROR("invalid safetensor file '%s'", file_path.c_str()); + file_paths_.pop_back(); + return false; + } + + // read header + std::vector header_buf; + header_buf.resize(header_size_ + 1); + header_buf[header_size_] = '\0'; + file.read(header_buf.data(), header_size_); + if (!file) { + LOG_ERROR("read safetensors header failed: '%s'", file_path.c_str()); + 
file_paths_.pop_back(); + return false; + } + + nlohmann::json header_ = nlohmann::json::parse(header_buf.data()); + + for (auto& item : header_.items()) { + std::string name = item.key(); + nlohmann::json tensor_info = item.value(); + // LOG_DEBUG("%s %s\n", name.c_str(), tensor_info.dump().c_str()); + + if (name == "__metadata__") { + continue; + } + + if (is_unused_tensor(name)) { + continue; + } + + std::string dtype = tensor_info["dtype"]; + nlohmann::json shape = tensor_info["shape"]; + + size_t begin = tensor_info["data_offsets"][0].get(); + size_t end = tensor_info["data_offsets"][1].get(); + + ggml_type type = str_to_ggml_type(dtype); + if (type == GGML_TYPE_COUNT) { + LOG_ERROR("unsupported dtype '%s' (tensor '%s')", dtype.c_str(), name.c_str()); + return false; + } + + if (shape.size() > SD_MAX_DIMS) { + LOG_ERROR("invalid tensor '%s'", name.c_str()); + return false; + } + + int n_dims = (int)shape.size(); + int64_t ne[SD_MAX_DIMS] = {1, 1, 1, 1, 1}; + for (int i = 0; i < n_dims; i++) { + ne[i] = shape[i].get(); + } + + if (n_dims == 5) { + if (ne[3] == 1 && ne[4] == 1) { + n_dims = 4; + } else { + LOG_ERROR("invalid tensor '%s'", name.c_str()); + return false; + } + } + + // ggml_n_dims returns 1 for scalars + if (n_dims == 0) { + n_dims = 1; + } + + TensorStorage tensor_storage(prefix + name, type, ne, n_dims, file_index, ST_HEADER_SIZE_LEN + header_size_ + begin); + tensor_storage.reverse_ne(); + + size_t tensor_data_size = end - begin; + + if (dtype == "BF16") { + tensor_storage.is_bf16 = true; + GGML_ASSERT(tensor_storage.nbytes() == tensor_data_size * 2); + } else if (dtype == "F8_E4M3") { + tensor_storage.is_f8_e4m3 = true; + // f8 -> f16 + GGML_ASSERT(tensor_storage.nbytes() == tensor_data_size * 2); + } else if (dtype == "F8_E5M2") { + tensor_storage.is_f8_e5m2 = true; + // f8 -> f16 + GGML_ASSERT(tensor_storage.nbytes() == tensor_data_size * 2); + } else { + GGML_ASSERT(tensor_storage.nbytes() == tensor_data_size); + } + + 
tensor_storages.push_back(tensor_storage); + add_preprocess_tensor_storage_types(tensor_storages_types, tensor_storage.name, tensor_storage.type); + + // LOG_DEBUG("%s %s", tensor_storage.to_string().c_str(), dtype.c_str()); + } + + return true; +} + +/*================================================= DiffusersModelLoader ==================================================*/ + +bool ModelLoader::init_from_diffusers_file(const std::string& file_path, const std::string& prefix) { + std::string unet_path = path_join(file_path, "unet/diffusion_pytorch_model.safetensors"); + std::string vae_path = path_join(file_path, "vae/diffusion_pytorch_model.safetensors"); + std::string clip_path = path_join(file_path, "text_encoder/model.safetensors"); + std::string clip_g_path = path_join(file_path, "text_encoder_2/model.safetensors"); + + if (!init_from_safetensors_file(unet_path, "unet.")) { + return false; + } + for (auto ts : tensor_storages) { + if (ts.name.find("add_embedding") != std::string::npos || ts.name.find("label_emb") != std::string::npos) { + // probably SDXL + LOG_DEBUG("Fixing name for SDXL output blocks.2.2"); + for (auto& tensor_storage : tensor_storages) { + int len = 34; + auto pos = tensor_storage.name.find("unet.up_blocks.0.upsamplers.0.conv"); + if (pos == std::string::npos) { + len = 44; + pos = tensor_storage.name.find("model.diffusion_model.output_blocks.2.1.conv"); + } + if (pos != std::string::npos) { + tensor_storage.name = "model.diffusion_model.output_blocks.2.2.conv" + tensor_storage.name.substr(len); + LOG_DEBUG("NEW NAME: %s", tensor_storage.name.c_str()); + add_preprocess_tensor_storage_types(tensor_storages_types, tensor_storage.name, tensor_storage.type); + } + } + break; + } + } + + if (!init_from_safetensors_file(vae_path, "vae.")) { + LOG_WARN("Couldn't find working VAE in %s", file_path.c_str()); + // return false; + } + if (!init_from_safetensors_file(clip_path, "te.")) { + LOG_WARN("Couldn't find working text encoder in %s", 
file_path.c_str()); + // return false; + } + if (!init_from_safetensors_file(clip_g_path, "te.1.")) { + LOG_DEBUG("Couldn't find working second text encoder in %s", file_path.c_str()); + } + return true; +} + +/*================================================= CkptModelLoader ==================================================*/ + +// $ python -m pickletools sd-v1-4/archive/data.pkl | head -n 100 +// 0: \x80 PROTO 2 +// 2: } EMPTY_DICT +// 3: q BINPUT 0 +// 5: ( MARK +// 6: X BINUNICODE 'epoch' +// 16: q BINPUT 1 +// 18: K BININT1 6 +// 20: X BINUNICODE 'global_step' +// 36: q BINPUT 2 +// 38: J BININT 470000 +// 43: X BINUNICODE 'pytorch-lightning_version' +// 73: q BINPUT 3 +// 75: X BINUNICODE '1.4.2' +// 85: q BINPUT 4 +// 87: X BINUNICODE 'state_dict' +// 102: q BINPUT 5 +// 104: } EMPTY_DICT +// 105: q BINPUT 6 +// 107: ( MARK +// 108: X BINUNICODE 'betas' +// 118: q BINPUT 7 +// 120: c GLOBAL 'torch._utils _rebuild_tensor_v2' +// 153: q BINPUT 8 +// 155: ( MARK +// 156: ( MARK +// 157: X BINUNICODE 'storage' +// 169: q BINPUT 9 +// 171: c GLOBAL 'torch FloatStorage' +// 191: q BINPUT 10 +// 193: X BINUNICODE '0' +// 199: q BINPUT 11 +// 201: X BINUNICODE 'cpu' +// 209: q BINPUT 12 +// 211: M BININT2 1000 +// 214: t TUPLE (MARK at 156) +// 215: q BINPUT 13 +// 217: Q BINPERSID +// 218: K BININT1 0 +// 220: M BININT2 1000 +// ............................... 
+// 3201: q BINPUT 250 +// 3203: R REDUCE +// 3204: q BINPUT 251 +// 3206: X BINUNICODE 'model.diffusion_model.input_blocks.1.1.proj_in.weight' +// 3264: q BINPUT 252 +// 3266: h BINGET 8 +// 3268: ( MARK +// 3269: ( MARK +// 3270: h BINGET 9 +// 3272: h BINGET 10 +// 3274: X BINUNICODE '30' +// 3281: q BINPUT 253 +// 3283: h BINGET 12 +// 3285: J BININT 102400 +// 3290: t TUPLE (MARK at 3269) +// 3291: q BINPUT 254 +// 3293: Q BINPERSID +// 3294: K BININT1 0 +// 3296: ( MARK +// 3297: M BININT2 320 +// 3300: M BININT2 320 +// 3303: K BININT1 1 +// 3305: K BININT1 1 +// 3307: t TUPLE (MARK at 3296) +// 3308: q BINPUT 255 +// 3310: ( MARK +// 3311: M BININT2 320 +// 3314: K BININT1 1 +// 3316: K BININT1 1 +// 3318: K BININT1 1 +// 3320: t TUPLE (MARK at 3310) +// 3321: r LONG_BINPUT 256 +// 3326: \x89 NEWFALSE +// 3327: h BINGET 16 +// 3329: ) EMPTY_TUPLE +// 3330: R REDUCE +// 3331: r LONG_BINPUT 257 +// 3336: t TUPLE (MARK at 3268) +// 3337: r LONG_BINPUT 258 +// 3342: R REDUCE +// 3343: r LONG_BINPUT 259 +// 3348: X BINUNICODE 'model.diffusion_model.input_blocks.1.1.proj_in.bias' +// 3404: r LONG_BINPUT 260 +// 3409: h BINGET 8 +// 3411: ( MARK +// 3412: ( MARK +// 3413: h BINGET 9 +// 3415: h BINGET 10 +// 3417: X BINUNICODE '31' + +struct PickleTensorReader { + enum ReadPhase { + READ_NAME, + READ_DATA, + CHECK_SIZE, + READ_DIMENS + }; + ReadPhase phase = READ_NAME; + size_t entry_size = 0; + int32_t nelements = 0; + + TensorStorage tensor_storage; + + static ggml_type global_type; // all pickle_tensors data type + static bool read_global_type; + + bool read_int_value(uint32_t value) { + if (phase == CHECK_SIZE) { + if (entry_size == value * ggml_type_size(tensor_storage.type)) { + nelements = value; + phase = READ_DIMENS; + return true; + } else { + phase = READ_NAME; + } + } else if (phase == READ_DIMENS) { + if (tensor_storage.n_dims + 1 > SD_MAX_DIMS) { // too many dimens + phase = READ_NAME; + tensor_storage.n_dims = 0; + } + if (nelements % value == 0) { 
+ tensor_storage.ne[tensor_storage.n_dims] = value; + tensor_storage.n_dims++; + } + } + return false; + } + + void read_global(const std::string& str) { + if (str == "FloatStorage") { + if (read_global_type) { + global_type = GGML_TYPE_F32; + read_global_type = false; + } + tensor_storage.type = GGML_TYPE_F32; + } else if (str == "HalfStorage") { + if (read_global_type) { + global_type = GGML_TYPE_F16; + read_global_type = false; + } + tensor_storage.type = GGML_TYPE_F16; + } + } + + void read_string(const std::string& str, struct zip_t* zip, std::string dir) { + if (str == "storage") { + read_global_type = true; + } else if (str != "state_dict") { + if (phase == READ_DATA) { + std::string entry_name = dir + "data/" + std::string(str); + + size_t i, n = zip_entries_total(zip); + for (i = 0; i < n; ++i) { + zip_entry_openbyindex(zip, i); + { + std::string name = zip_entry_name(zip); + if (name == entry_name) { + tensor_storage.index_in_zip = (int)i; + entry_size = zip_entry_size(zip); + zip_entry_close(zip); + break; + } + } + zip_entry_close(zip); + } + + phase = entry_size > 0 ? 
CHECK_SIZE : READ_NAME; + } + if (!read_global_type && phase == READ_NAME) { + tensor_storage.name = str; + phase = READ_DATA; + tensor_storage.type = global_type; + } + } + } +}; + +ggml_type PickleTensorReader::global_type = GGML_TYPE_F32; // all pickle_tensors data type +bool PickleTensorReader::read_global_type = false; + +int find_char(uint8_t* buffer, int len, char c) { + for (int pos = 0; pos < len; pos++) { + if (buffer[pos] == c) { + return pos; + } + } + return -1; +} + +#define MAX_STRING_BUFFER 512 + +bool ModelLoader::parse_data_pkl(uint8_t* buffer, + size_t buffer_size, + zip_t* zip, + std::string dir, + size_t file_index, + const std::string prefix) { + uint8_t* buffer_end = buffer + buffer_size; + if (buffer[0] == 0x80) { // proto + if (buffer[1] != 2) { + LOG_ERROR("Unsupported protocol\n"); + return false; + } + buffer += 2; // 0x80 and version + char string_buffer[MAX_STRING_BUFFER]; + bool finish = false; + PickleTensorReader reader; + // read pickle binary file + while (!finish && buffer < buffer_end) { + uint8_t opcode = *buffer; + buffer++; + // https://github.com/python/cpython/blob/3.7/Lib/pickletools.py#L1048 + // https://github.com/python/cpython/blob/main/Lib/pickle.py#L105 + switch (opcode) { + case '}': // EMPTY_DICT = b'}' # push empty dict + break; + case ']': // EMPTY_LIST = b']' # push empty list + break; + // skip unused sections + case 'h': // BINGET = b'h' # " " " " " " ; " " 1-byte arg + case 'q': // BINPUT = b'q' # " " " " " ; " " 1-byte arg + case 'Q': // BINPERSID = b'Q' # " " " ; " " " " stack + buffer++; + break; + case 'r': // LONG_BINPUT = b'r' # " " " " " ; " " 4-byte arg + buffer += 4; + break; + case 0x95: // FRAME = b'\x95' # indicate the beginning of a new frame + buffer += 8; + break; + case 0x94: // MEMOIZE = b'\x94' # store top of the stack in memo + break; + case '(': // MARK = b'(' # push special markobject on stack + break; + case 'K': // BININT1 = b'K' # push 1-byte unsigned int + { + uint8_t value = *buffer; 
+ if (reader.read_int_value(value)) { + buffer++; + } + buffer++; + } break; + case 'M': // BININT2 = b'M' # push 2-byte unsigned int + { + uint16_t value = read_short(buffer); + if (reader.read_int_value(value)) { + buffer++; + } + buffer += 2; + } break; + case 'J': // BININT = b'J' # push four-byte signed int + { + const int32_t value = read_int(buffer); + if (reader.read_int_value(value)) { + buffer++; // skip tuple after read num_elements + } + buffer += 4; + } break; + case 'X': // BINUNICODE = b'X' # " " " ; counted UTF-8 string argument + { + const int32_t len = read_int(buffer); + buffer += 4; + memset(string_buffer, 0, MAX_STRING_BUFFER); + if (len > MAX_STRING_BUFFER) { + LOG_WARN("tensor name very large"); + } + memcpy(string_buffer, buffer, len < MAX_STRING_BUFFER ? len : (MAX_STRING_BUFFER - 1)); + buffer += len; + reader.read_string(string_buffer, zip, dir); + } break; + case 0x8C: // SHORT_BINUNICODE = b'\x8c' # push short string; UTF-8 length < 256 bytes + { + const int8_t len = *buffer; + buffer++; + memset(string_buffer, 0, MAX_STRING_BUFFER); + memcpy(string_buffer, buffer, len); + buffer += len; + // printf("String: '%s'\n", string_buffer); + } break; + case 'c': // GLOBAL = b'c' # push self.find_class(modname, name); 2 string args + { + int len = find_char(buffer, MAX_STRING_BUFFER, '\n'); + + buffer += len + 1; + len = find_char(buffer, MAX_STRING_BUFFER, '\n'); + + memset(string_buffer, 0, MAX_STRING_BUFFER); + memcpy(string_buffer, buffer, len); + buffer += len + 1; + reader.read_global(string_buffer); + } break; + case 0x86: // TUPLE2 = b'\x86' # build 2-tuple from two topmost stack items + case 0x85: // TUPLE1 = b'\x85' # build 1-tuple from stack top + case 't': // TUPLE = b't' # build tuple from topmost stack items + if (reader.phase == PickleTensorReader::READ_DIMENS) { + reader.tensor_storage.reverse_ne(); + reader.tensor_storage.file_index = file_index; + // if(strcmp(prefix.c_str(), "scarlett") == 0) + // printf(" ZIP got tensor %s 
\n ", reader.tensor_storage.name.c_str()); + reader.tensor_storage.name = prefix + reader.tensor_storage.name; + tensor_storages.push_back(reader.tensor_storage); + add_preprocess_tensor_storage_types(tensor_storages_types, reader.tensor_storage.name, reader.tensor_storage.type); + + // LOG_DEBUG("%s", reader.tensor_storage.name.c_str()); + // reset + reader = PickleTensorReader(); + } + break; + case '.': // STOP = b'.' # every pickle ends with STOP + finish = true; + break; + default: + break; + } + } + } + return true; +} + +bool ModelLoader::init_from_ckpt_file(const std::string& file_path, const std::string& prefix) { + LOG_DEBUG("init from '%s'", file_path.c_str()); + file_paths_.push_back(file_path); + size_t file_index = file_paths_.size() - 1; + + struct zip_t* zip = zip_open(file_path.c_str(), 0, 'r'); + if (zip == NULL) { + LOG_ERROR("failed to open '%s'", file_path.c_str()); + return false; + } + int n = (int)zip_entries_total(zip); + for (int i = 0; i < n; ++i) { + zip_entry_openbyindex(zip, i); + { + std::string name = zip_entry_name(zip); + size_t pos = name.find("data.pkl"); + if (pos != std::string::npos) { + std::string dir = name.substr(0, pos); + printf("ZIP %d, name = %s, dir = %s \n", i, name.c_str(), dir.c_str()); + void* pkl_data = NULL; + size_t pkl_size; + zip_entry_read(zip, &pkl_data, &pkl_size); + + // LOG_DEBUG("%lld", pkl_size); + + parse_data_pkl((uint8_t*)pkl_data, pkl_size, zip, dir, file_index, prefix); + + free(pkl_data); + } + } + zip_entry_close(zip); + } + zip_close(zip); + return true; +} + +bool ModelLoader::model_is_unet() { + for (auto& tensor_storage : tensor_storages) { + if (tensor_storage.name.find("model.diffusion_model.input_blocks.") != std::string::npos) { + return true; + } + } + return false; +} + +SDVersion ModelLoader::get_sd_version() { + TensorStorage token_embedding_weight, input_block_weight; + bool input_block_checked = false; + + bool has_multiple_encoders = false; + bool is_unet = false; + + bool is_xl = 
false; + bool is_flux = false; + +#define found_family (is_xl || is_flux) + for (auto& tensor_storage : tensor_storages) { + if (!found_family) { + if (tensor_storage.name.find("model.diffusion_model.double_blocks.") != std::string::npos) { + is_flux = true; + if (input_block_checked) { + break; + } + } + if (tensor_storage.name.find("model.diffusion_model.joint_blocks.") != std::string::npos) { + return VERSION_SD3; + } + if (tensor_storage.name.find("model.diffusion_model.input_blocks.") != std::string::npos || tensor_storage.name.find("unet.down_blocks.") != std::string::npos) { + is_unet = true; + if (has_multiple_encoders) { + is_xl = true; + if (input_block_checked) { + break; + } + } + } + if (tensor_storage.name.find("conditioner.embedders.1") != std::string::npos || tensor_storage.name.find("cond_stage_model.1") != std::string::npos || tensor_storage.name.find("te.1") != std::string::npos) { + has_multiple_encoders = true; + if (is_unet) { + is_xl = true; + if (input_block_checked) { + break; + } + } + } + if (tensor_storage.name.find("model.diffusion_model.input_blocks.8.0.time_mixer.mix_factor") != std::string::npos) { + return VERSION_SVD; + } + } + if (tensor_storage.name == "cond_stage_model.transformer.text_model.embeddings.token_embedding.weight" || + tensor_storage.name == "cond_stage_model.model.token_embedding.weight" || + tensor_storage.name == "text_model.embeddings.token_embedding.weight" || + tensor_storage.name == "te.text_model.embeddings.token_embedding.weight" || + tensor_storage.name == "conditioner.embedders.0.model.token_embedding.weight" || + tensor_storage.name == "conditioner.embedders.0.transformer.text_model.embeddings.token_embedding.weight") { + token_embedding_weight = tensor_storage; + // break; + } + if (tensor_storage.name == "model.diffusion_model.input_blocks.0.0.weight" || tensor_storage.name == "model.diffusion_model.img_in.weight" || tensor_storage.name == "unet.conv_in.weight") { + input_block_weight = tensor_storage; 
+ input_block_checked = true; + if (found_family) { + break; + } + } + } + bool is_inpaint = input_block_weight.ne[2] == 9; + if (is_xl) { + if (is_inpaint) { + return VERSION_SDXL_INPAINT; + } + return VERSION_SDXL; + } + + if (is_flux) { + is_inpaint = input_block_weight.ne[0] == 384; + if (is_inpaint) { + return VERSION_FLUX_FILL; + } + return VERSION_FLUX; + } + + if (token_embedding_weight.ne[0] == 768) { + if (is_inpaint) { + return VERSION_SD1_INPAINT; + } + return VERSION_SD1; + } else if (token_embedding_weight.ne[0] == 1024) { + if (is_inpaint) { + return VERSION_SD2_INPAINT; + } + return VERSION_SD2; + } + return VERSION_COUNT; +} + +ggml_type ModelLoader::get_sd_wtype() { + for (auto& tensor_storage : tensor_storages) { + if (is_unused_tensor(tensor_storage.name)) { + continue; + } + + if (ggml_is_quantized(tensor_storage.type)) { + return tensor_storage.type; + } + + if (tensor_should_be_converted(tensor_storage, GGML_TYPE_Q4_K)) { + return tensor_storage.type; + } + } + return GGML_TYPE_COUNT; +} + +ggml_type ModelLoader::get_conditioner_wtype() { + for (auto& tensor_storage : tensor_storages) { + if (is_unused_tensor(tensor_storage.name)) { + continue; + } + + if ((tensor_storage.name.find("text_encoders") == std::string::npos && + tensor_storage.name.find("cond_stage_model") == std::string::npos && + tensor_storage.name.find("te.text_model.") == std::string::npos && + tensor_storage.name.find("conditioner") == std::string::npos)) { + continue; + } + + if (ggml_is_quantized(tensor_storage.type)) { + return tensor_storage.type; + } + + if (tensor_should_be_converted(tensor_storage, GGML_TYPE_Q4_K)) { + return tensor_storage.type; + } + } + return GGML_TYPE_COUNT; +} + +ggml_type ModelLoader::get_diffusion_model_wtype() { + for (auto& tensor_storage : tensor_storages) { + if (is_unused_tensor(tensor_storage.name)) { + continue; + } + + if (tensor_storage.name.find("model.diffusion_model.") == std::string::npos && tensor_storage.name.find("unet.") == 
std::string::npos) { + continue; + } + + if (ggml_is_quantized(tensor_storage.type)) { + return tensor_storage.type; + } + + if (tensor_should_be_converted(tensor_storage, GGML_TYPE_Q4_K)) { + return tensor_storage.type; + } + } + return GGML_TYPE_COUNT; +} + +ggml_type ModelLoader::get_vae_wtype() { + for (auto& tensor_storage : tensor_storages) { + if (is_unused_tensor(tensor_storage.name)) { + continue; + } + + if (tensor_storage.name.find("vae.") == std::string::npos && + tensor_storage.name.find("first_stage_model") == std::string::npos) { + continue; + } + + if (ggml_is_quantized(tensor_storage.type)) { + return tensor_storage.type; + } + + if (tensor_should_be_converted(tensor_storage, GGML_TYPE_Q4_K)) { + return tensor_storage.type; + } + } + return GGML_TYPE_COUNT; +} + +void ModelLoader::set_wtype_override(ggml_type wtype, std::string prefix) { + for (auto& pair : tensor_storages_types) { + if (prefix.size() < 1 || pair.first.substr(0, prefix.size()) == prefix) { + bool found = false; + for (auto& tensor_storage : tensor_storages) { + std::map temp; + add_preprocess_tensor_storage_types(temp, tensor_storage.name, tensor_storage.type); + for (auto& preprocessed_name : temp) { + if (preprocessed_name.first == pair.first) { + if (tensor_should_be_converted(tensor_storage, wtype)) { + pair.second = wtype; + } + found = true; + break; + } + } + if (found) { + break; + } + } + } + } +} + +std::string ModelLoader::load_merges() { + std::string merges_utf8_str(reinterpret_cast(merges_utf8_c_str), sizeof(merges_utf8_c_str)); + return merges_utf8_str; +} + +std::string ModelLoader::load_t5_tokenizer_json() { + std::string json_str(reinterpret_cast(t5_tokenizer_json_str), sizeof(t5_tokenizer_json_str)); + return json_str; +} + +std::vector remove_duplicates(const std::vector& vec) { + std::vector res; + std::unordered_map name_to_index_map; + + for (size_t i = 0; i < vec.size(); ++i) { + const std::string& current_name = vec[i].name; + auto it = 
name_to_index_map.find(current_name); + + if (it != name_to_index_map.end()) { + res[it->second] = vec[i]; + } else { + name_to_index_map[current_name] = i; + res.push_back(vec[i]); + } + } + + // vec.resize(name_to_index_map.size()); + + return res; +} + +bool ModelLoader::load_tensors(on_new_tensor_cb_t on_new_tensor_cb, ggml_backend_t backend) { + std::vector processed_tensor_storages; + for (auto& tensor_storage : tensor_storages) { + // LOG_DEBUG("%s", name.c_str()); + + if (is_unused_tensor(tensor_storage.name)) { + continue; + } + + preprocess_tensor(tensor_storage, processed_tensor_storages); + } + std::vector dedup = remove_duplicates(processed_tensor_storages); + processed_tensor_storages = dedup; + + bool success = true; + for (size_t file_index = 0; file_index < file_paths_.size(); file_index++) { + std::string file_path = file_paths_[file_index]; + LOG_DEBUG("loading tensors from %s", file_path.c_str()); + + std::ifstream file(file_path, std::ios::binary); + if (!file.is_open()) { + LOG_ERROR("failed to open '%s'", file_path.c_str()); + return false; + } + + bool is_zip = false; + for (auto& tensor_storage : tensor_storages) { + if (tensor_storage.file_index != file_index) { + continue; + } + if (tensor_storage.index_in_zip >= 0) { + is_zip = true; + break; + } + } + + struct zip_t* zip = NULL; + if (is_zip) { + zip = zip_open(file_path.c_str(), 0, 'r'); + if (zip == NULL) { + LOG_ERROR("failed to open zip '%s'", file_path.c_str()); + return false; + } + } + + std::vector read_buffer; + std::vector convert_buffer; + + auto read_data = [&](const TensorStorage& tensor_storage, char* buf, size_t n) { + if (zip != NULL) { + zip_entry_openbyindex(zip, tensor_storage.index_in_zip); + size_t entry_size = zip_entry_size(zip); + if (entry_size != n) { + read_buffer.resize(entry_size); + zip_entry_noallocread(zip, (void*)read_buffer.data(), entry_size); + memcpy((void*)buf, (void*)(read_buffer.data() + tensor_storage.offset), n); + } else { + 
zip_entry_noallocread(zip, (void*)buf, n); + } + zip_entry_close(zip); + } else { + file.seekg(tensor_storage.offset); + file.read(buf, n); + if (!file) { + LOG_ERROR("read tensor data failed: '%s'", file_path.c_str()); + return false; + } + } + return true; + }; + int tensor_count = 0; + int64_t t1 = ggml_time_ms(); + bool partial = false; + for (auto& tensor_storage : processed_tensor_storages) { + if (tensor_storage.file_index != file_index) { + ++tensor_count; + continue; + } + ggml_tensor* dst_tensor = NULL; + + success = on_new_tensor_cb(tensor_storage, &dst_tensor); + if (!success) { + LOG_WARN("process tensor failed: '%s'", tensor_storage.name.c_str()); + break; + } + + if (dst_tensor == NULL) { + ++tensor_count; + continue; + } + + size_t nbytes_to_read = tensor_storage.nbytes_to_read(); + + if (dst_tensor->buffer == NULL || ggml_backend_buffer_is_host(dst_tensor->buffer)) { + // for the CPU and Metal backend, we can copy directly into the tensor + if (tensor_storage.type == dst_tensor->type) { + GGML_ASSERT(ggml_nbytes(dst_tensor) == tensor_storage.nbytes()); + read_data(tensor_storage, (char*)dst_tensor->data, nbytes_to_read); + + if (tensor_storage.is_bf16) { + // inplace op + bf16_to_f32_vec((uint16_t*)dst_tensor->data, (float*)dst_tensor->data, tensor_storage.nelements()); + } else if (tensor_storage.is_f8_e4m3) { + // inplace op + f8_e4m3_to_f16_vec((uint8_t*)dst_tensor->data, (uint16_t*)dst_tensor->data, tensor_storage.nelements()); + } else if (tensor_storage.is_f8_e5m2) { + // inplace op + f8_e5m2_to_f16_vec((uint8_t*)dst_tensor->data, (uint16_t*)dst_tensor->data, tensor_storage.nelements()); + } + } else { + read_buffer.resize(tensor_storage.nbytes()); + read_data(tensor_storage, (char*)read_buffer.data(), nbytes_to_read); + + if (tensor_storage.is_bf16) { + // inplace op + bf16_to_f32_vec((uint16_t*)read_buffer.data(), (float*)read_buffer.data(), tensor_storage.nelements()); + } else if (tensor_storage.is_f8_e4m3) { + // inplace op + 
f8_e4m3_to_f16_vec((uint8_t*)read_buffer.data(), (uint16_t*)read_buffer.data(), tensor_storage.nelements()); + } else if (tensor_storage.is_f8_e5m2) { + // inplace op + f8_e5m2_to_f16_vec((uint8_t*)read_buffer.data(), (uint16_t*)read_buffer.data(), tensor_storage.nelements()); + } + + convert_tensor((void*)read_buffer.data(), tensor_storage.type, dst_tensor->data, + dst_tensor->type, (int)tensor_storage.nelements() / (int)tensor_storage.ne[0], (int)tensor_storage.ne[0]); + } + } else { + read_buffer.resize(tensor_storage.nbytes()); + read_data(tensor_storage, (char*)read_buffer.data(), nbytes_to_read); + + if (tensor_storage.is_bf16) { + // inplace op + bf16_to_f32_vec((uint16_t*)read_buffer.data(), (float*)read_buffer.data(), tensor_storage.nelements()); + } else if (tensor_storage.is_f8_e4m3) { + // inplace op + f8_e4m3_to_f16_vec((uint8_t*)read_buffer.data(), (uint16_t*)read_buffer.data(), tensor_storage.nelements()); + } else if (tensor_storage.is_f8_e5m2) { + // inplace op + f8_e5m2_to_f16_vec((uint8_t*)read_buffer.data(), (uint16_t*)read_buffer.data(), tensor_storage.nelements()); + } + + if (tensor_storage.type == dst_tensor->type) { + // copy to device memory + ggml_backend_tensor_set(dst_tensor, read_buffer.data(), 0, ggml_nbytes(dst_tensor)); + } else { + // convert first, then copy to device memory + convert_buffer.resize(ggml_nbytes(dst_tensor)); + convert_tensor((void*)read_buffer.data(), tensor_storage.type, + (void*)convert_buffer.data(), dst_tensor->type, + (int)tensor_storage.nelements() / (int)tensor_storage.ne[0], (int)tensor_storage.ne[0]); + ggml_backend_tensor_set(dst_tensor, convert_buffer.data(), 0, ggml_nbytes(dst_tensor)); + } + } + size_t tensor_max = processed_tensor_storages.size(); + int64_t t2 = ggml_time_ms(); + pretty_progress(++tensor_count, tensor_max, (t2 - t1) / 1000.0f); + t1 = t2; + partial = tensor_count != tensor_max; + } + + if (zip != NULL) { + zip_close(zip); + } + + if (partial) { + printf("\n"); + } + + if (!success) { 
+ break; + } + } + return success; +} + +bool ModelLoader::load_tensors(std::map& tensors, + ggml_backend_t backend, + std::set ignore_tensors) { + std::set tensor_names_in_file; + auto on_new_tensor_cb = [&](const TensorStorage& tensor_storage, ggml_tensor** dst_tensor) -> bool { + const std::string& name = tensor_storage.name; + // LOG_DEBUG("%s", tensor_storage.to_string().c_str()); + tensor_names_in_file.insert(name); + + struct ggml_tensor* real; + if (tensors.find(name) != tensors.end()) { + real = tensors[name]; + } else { + for (auto& ignore_tensor : ignore_tensors) { + if (starts_with(name, ignore_tensor)) { + return true; + } + } + LOG_INFO("unknown tensor '%s' in model file", tensor_storage.to_string().c_str()); + return true; + } + + if ( + real->ne[0] != tensor_storage.ne[0] || + real->ne[1] != tensor_storage.ne[1] || + real->ne[2] != tensor_storage.ne[2] || + real->ne[3] != tensor_storage.ne[3]) { + LOG_ERROR( + "tensor '%s' has wrong shape in model file: " + "got [%d, %d, %d, %d], expected [%d, %d, %d, %d]", + name.c_str(), + (int)tensor_storage.ne[0], (int)tensor_storage.ne[1], (int)tensor_storage.ne[2], (int)tensor_storage.ne[3], + (int)real->ne[0], (int)real->ne[1], (int)real->ne[2], (int)real->ne[3]); + return false; + } + + *dst_tensor = real; + + return true; + }; + + bool success = load_tensors(on_new_tensor_cb, backend); + if (!success) { + LOG_ERROR("load tensors from file failed"); + return false; + } + + bool some_tensor_not_init = false; + + for (auto pair : tensors) { + if (pair.first.find("cond_stage_model.transformer.text_model.encoder.layers.23") != std::string::npos) { + continue; + } + + if (pair.first.find("alphas_cumprod") != std::string::npos) { + continue; + } + + if (tensor_names_in_file.find(pair.first) == tensor_names_in_file.end()) { + LOG_ERROR("tensor '%s' not in model file", pair.first.c_str()); + some_tensor_not_init = true; + } + } + + if (some_tensor_not_init) { + return false; + } + return true; +} + +std::vector> 
parse_tensor_type_rules(const std::string& tensor_type_rules) { + std::vector> result; + for (const auto& item : splitString(tensor_type_rules, ',')) { + if (item.size() == 0) + continue; + std::string::size_type pos = item.find('='); + if (pos == std::string::npos) { + LOG_WARN("ignoring invalid quant override \"%s\"", item.c_str()); + continue; + } + std::string tensor_pattern = item.substr(0, pos); + std::string type_name = item.substr(pos + 1); + + ggml_type tensor_type = GGML_TYPE_COUNT; + + if (type_name == "f32") { + tensor_type = GGML_TYPE_F32; + } else { + for (size_t i = 0; i < SD_TYPE_COUNT; i++) { + auto trait = ggml_get_type_traits((ggml_type)i); + if (trait->to_float && trait->type_size && type_name == trait->type_name) { + tensor_type = (ggml_type)i; + } + } + } + + if (tensor_type != GGML_TYPE_COUNT) { + result.emplace_back(tensor_pattern, tensor_type); + } else { + LOG_WARN("ignoring invalid quant override \"%s\"", item.c_str()); + } + } + return result; +} + +bool ModelLoader::tensor_should_be_converted(const TensorStorage& tensor_storage, ggml_type type) { + const std::string& name = tensor_storage.name; + if (type != GGML_TYPE_COUNT) { + if (ggml_is_quantized(type) && tensor_storage.ne[0] % ggml_blck_size(type) != 0) { + // Pass, do not convert + } else if (ends_with(name, ".bias")) { + // Pass, do not convert + } else if (ends_with(name, ".scale")) { + // Pass, do not convert + } else if (contains(name, "img_in.") || + contains(name, "txt_in.") || + contains(name, "time_in.") || + contains(name, "vector_in.") || + contains(name, "guidance_in.") || + contains(name, "final_layer.")) { + // Pass, do not convert. For FLUX + } else if (contains(name, "x_embedder.") || + contains(name, "t_embedder.") || + contains(name, "y_embedder.") || + contains(name, "pos_embed") || + contains(name, "context_embedder.")) { + // Pass, do not convert. For MMDiT + } else if (contains(name, "time_embed.") || contains(name, "label_emb.")) { + // Pass, do not convert. 
For Unet + } else { + return true; + } + } + return false; +} + +bool ModelLoader::save_to_gguf_file(const std::string& file_path, ggml_type type, const std::string& tensor_type_rules_str) { + auto backend = ggml_backend_cpu_init(); + size_t mem_size = 1 * 1024 * 1024; // for padding + mem_size += tensor_storages.size() * ggml_tensor_overhead(); + mem_size += get_params_mem_size(backend, type); + LOG_INFO("model tensors mem size: %.2fMB", mem_size / 1024.f / 1024.f); + ggml_context* ggml_ctx = ggml_init({mem_size, NULL, false}); + + gguf_context* gguf_ctx = gguf_init_empty(); + + auto tensor_type_rules = parse_tensor_type_rules(tensor_type_rules_str); + + auto on_new_tensor_cb = [&](const TensorStorage& tensor_storage, ggml_tensor** dst_tensor) -> bool { + const std::string& name = tensor_storage.name; + ggml_type tensor_type = tensor_storage.type; + ggml_type dst_type = type; + + for (const auto& tensor_type_rule : tensor_type_rules) { + std::regex pattern(tensor_type_rule.first); + if (std::regex_search(name, pattern)) { + dst_type = tensor_type_rule.second; + break; + } + } + + if (tensor_should_be_converted(tensor_storage, dst_type)) { + tensor_type = dst_type; + } + + ggml_tensor* tensor = ggml_new_tensor(ggml_ctx, tensor_type, tensor_storage.n_dims, tensor_storage.ne); + if (tensor == NULL) { + LOG_ERROR("ggml_new_tensor failed"); + return false; + } + ggml_set_name(tensor, name.c_str()); + + // LOG_DEBUG("%s %d %s %d[%d %d %d %d] %d[%d %d %d %d]", name.c_str(), + // ggml_nbytes(tensor), ggml_type_name(tensor_type), + // tensor_storage.n_dims, + // tensor_storage.ne[0], tensor_storage.ne[1], tensor_storage.ne[2], tensor_storage.ne[3], + // tensor->n_dims, tensor->ne[0], tensor->ne[1], tensor->ne[2], tensor->ne[3]); + + *dst_tensor = tensor; + + gguf_add_tensor(gguf_ctx, tensor); + + return true; + }; + + bool success = load_tensors(on_new_tensor_cb, backend); + ggml_backend_free(backend); + LOG_INFO("load tensors done"); + LOG_INFO("trying to save tensors to 
%s", file_path.c_str()); + if (success) { + gguf_write_to_file(gguf_ctx, file_path.c_str(), false); + } + ggml_free(ggml_ctx); + gguf_free(gguf_ctx); + return success; +} + +int64_t ModelLoader::get_params_mem_size(ggml_backend_t backend, ggml_type type) { + size_t alignment = 128; + if (backend != NULL) { + alignment = ggml_backend_get_alignment(backend); + } + int64_t mem_size = 0; + std::vector processed_tensor_storages; + for (auto& tensor_storage : tensor_storages) { + if (is_unused_tensor(tensor_storage.name)) { + continue; + } + preprocess_tensor(tensor_storage, processed_tensor_storages); + } + + for (auto& tensor_storage : processed_tensor_storages) { + if (tensor_should_be_converted(tensor_storage, type)) { + tensor_storage.type = type; + } + mem_size += tensor_storage.nbytes() + alignment; + } + + return mem_size; +} + +bool convert(const char* input_path, const char* vae_path, const char* output_path, sd_type_t output_type, const char* tensor_type_rules) { + ModelLoader model_loader; + + if (!model_loader.init_from_file(input_path)) { + LOG_ERROR("init model loader from file failed: '%s'", input_path); + return false; + } + + if (vae_path != NULL && strlen(vae_path) > 0) { + if (!model_loader.init_from_file(vae_path, "vae.")) { + LOG_ERROR("init model loader from file failed: '%s'", vae_path); + return false; + } + } + bool success = model_loader.save_to_gguf_file(output_path, (ggml_type)output_type, tensor_type_rules); + return success; +} diff --git a/model.h b/model.h new file mode 100644 index 000000000..95c66319d --- /dev/null +++ b/model.h @@ -0,0 +1,234 @@ +#ifndef __MODEL_H__ +#define __MODEL_H__ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "ggml-backend.h" +#include "ggml.h" +#include "gguf.h" +#include "json.hpp" +#include "zip.h" + +#define SD_MAX_DIMS 5 + +enum SDVersion { + VERSION_SD1, + VERSION_SD1_INPAINT, + VERSION_SD2, + VERSION_SD2_INPAINT, + VERSION_SDXL, + VERSION_SDXL_INPAINT, + 
VERSION_SVD, + VERSION_SD3, + VERSION_FLUX, + VERSION_FLUX_FILL, + VERSION_COUNT, +}; + +static inline bool sd_version_is_flux(SDVersion version) { + if (version == VERSION_FLUX || version == VERSION_FLUX_FILL) { + return true; + } + return false; +} + +static inline bool sd_version_is_sd3(SDVersion version) { + if (version == VERSION_SD3) { + return true; + } + return false; +} + +static inline bool sd_version_is_sd1(SDVersion version) { + if (version == VERSION_SD1 || version == VERSION_SD1_INPAINT) { + return true; + } + return false; +} + +static inline bool sd_version_is_sd2(SDVersion version) { + if (version == VERSION_SD2 || version == VERSION_SD2_INPAINT) { + return true; + } + return false; +} + +static inline bool sd_version_is_sdxl(SDVersion version) { + if (version == VERSION_SDXL || version == VERSION_SDXL_INPAINT) { + return true; + } + return false; +} + +static inline bool sd_version_is_inpaint(SDVersion version) { + if (version == VERSION_SD1_INPAINT || version == VERSION_SD2_INPAINT || version == VERSION_SDXL_INPAINT || version == VERSION_FLUX_FILL) { + return true; + } + return false; +} + +static inline bool sd_version_is_dit(SDVersion version) { + if (sd_version_is_flux(version) || sd_version_is_sd3(version)) { + return true; + } + return false; +} + +enum PMVersion { + PM_VERSION_1, + PM_VERSION_2, +}; + +struct TensorStorage { + std::string name; + ggml_type type = GGML_TYPE_F32; + bool is_bf16 = false; + bool is_f8_e4m3 = false; + bool is_f8_e5m2 = false; + int64_t ne[SD_MAX_DIMS] = {1, 1, 1, 1, 1}; + int n_dims = 0; + + size_t file_index = 0; + int index_in_zip = -1; // >= means stored in a zip file + size_t offset = 0; // offset in file + + TensorStorage() = default; + + TensorStorage(const std::string& name, ggml_type type, int64_t* ne, int n_dims, size_t file_index, size_t offset = 0) + : name(name), type(type), n_dims(n_dims), file_index(file_index), offset(offset) { + for (int i = 0; i < n_dims; i++) { + this->ne[i] = ne[i]; + } + } + 
+ int64_t nelements() const { + int64_t n = 1; + for (int i = 0; i < SD_MAX_DIMS; i++) { + n *= ne[i]; + } + return n; + } + + int64_t nbytes() const { + return nelements() * ggml_type_size(type) / ggml_blck_size(type); + } + + int64_t nbytes_to_read() const { + if (is_bf16 || is_f8_e4m3 || is_f8_e5m2) { + return nbytes() / 2; + } else { + return nbytes(); + } + } + + void unsqueeze() { + if (n_dims == 2) { + n_dims = 4; + ne[3] = ne[1]; + ne[2] = ne[0]; + ne[1] = 1; + ne[0] = 1; + } + } + + std::vector chunk(size_t n) { + std::vector chunks; + size_t chunk_size = nbytes_to_read() / n; + // printf("%d/%d\n", chunk_size, nbytes_to_read()); + reverse_ne(); + for (int i = 0; i < n; i++) { + TensorStorage chunk_i = *this; + chunk_i.ne[0] = ne[0] / n; + chunk_i.offset = offset + i * chunk_size; + chunk_i.reverse_ne(); + chunks.push_back(chunk_i); + } + reverse_ne(); + return chunks; + } + + void reverse_ne() { + int64_t new_ne[SD_MAX_DIMS] = {1, 1, 1, 1, 1}; + for (int i = 0; i < n_dims; i++) { + new_ne[i] = ne[n_dims - 1 - i]; + } + for (int i = 0; i < n_dims; i++) { + ne[i] = new_ne[i]; + } + } + + std::string to_string() const { + std::stringstream ss; + const char* type_name = ggml_type_name(type); + if (is_bf16) { + type_name = "bf16"; + } else if (is_f8_e4m3) { + type_name = "f8_e4m3"; + } else if (is_f8_e5m2) { + type_name = "f8_e5m2"; + } + ss << name << " | " << type_name << " | "; + ss << n_dims << " ["; + for (int i = 0; i < SD_MAX_DIMS; i++) { + ss << ne[i]; + if (i != SD_MAX_DIMS - 1) { + ss << ", "; + } + } + ss << "]"; + return ss.str(); + } +}; + +typedef std::function on_new_tensor_cb_t; + +class ModelLoader { +protected: + std::vector file_paths_; + std::vector tensor_storages; + + bool parse_data_pkl(uint8_t* buffer, + size_t buffer_size, + zip_t* zip, + std::string dir, + size_t file_index, + const std::string prefix); + + bool init_from_gguf_file(const std::string& file_path, const std::string& prefix = ""); + bool init_from_safetensors_file(const 
std::string& file_path, const std::string& prefix = ""); + bool init_from_ckpt_file(const std::string& file_path, const std::string& prefix = ""); + bool init_from_diffusers_file(const std::string& file_path, const std::string& prefix = ""); + +public: + std::map tensor_storages_types; + + bool init_from_file(const std::string& file_path, const std::string& prefix = ""); + bool model_is_unet(); + SDVersion get_sd_version(); + ggml_type get_sd_wtype(); + ggml_type get_conditioner_wtype(); + ggml_type get_diffusion_model_wtype(); + ggml_type get_vae_wtype(); + void set_wtype_override(ggml_type wtype, std::string prefix = ""); + bool load_tensors(on_new_tensor_cb_t on_new_tensor_cb, ggml_backend_t backend); + bool load_tensors(std::map& tensors, + ggml_backend_t backend, + std::set ignore_tensors = {}); + + bool save_to_gguf_file(const std::string& file_path, ggml_type type, const std::string& tensor_type_rules); + bool tensor_should_be_converted(const TensorStorage& tensor_storage, ggml_type type); + int64_t get_params_mem_size(ggml_backend_t backend, ggml_type type = GGML_TYPE_COUNT); + ~ModelLoader() = default; + + static std::string load_merges(); + static std::string load_t5_tokenizer_json(); +}; + +#endif // __MODEL_H__ diff --git a/models/.gitignore b/models/.gitignore deleted file mode 100644 index 756d38c5a..000000000 --- a/models/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -*.bin -*.ckpt -*.safetensor -*.log \ No newline at end of file diff --git a/models/README.md b/models/README.md deleted file mode 100644 index bb1bab387..000000000 --- a/models/README.md +++ /dev/null @@ -1,26 +0,0 @@ -# Model Convert Script - -## Requirements - -- vocab.json, from https://huggingface.co/openai/clip-vit-large-patch14/raw/main/vocab.json - - -```shell -pip install -r requirements.txt -``` - -## Usage -``` -usage: convert.py [-h] [--out_type {f32,f16,q4_0,q4_1,q5_0,q5_1,q8_0}] [--out_file OUT_FILE] model_path - -Convert Stable Diffuison model to GGML compatible file format - 
-positional arguments: - model_path model file path (*.pth, *.pt, *.ckpt, *.safetensors) - -options: - -h, --help show this help message and exit - --out_type {f32,f16,q4_0,q4_1,q5_0,q5_1,q8_0} - output format (default: based on input) - --out_file OUT_FILE path to write to; default: based on input and current working directory -``` diff --git a/models/convert.py b/models/convert.py deleted file mode 100644 index 3750e049b..000000000 --- a/models/convert.py +++ /dev/null @@ -1,264 +0,0 @@ -import struct -import json -import os - -import numpy as np -import torch -import safetensors.torch - -this_file_dir = os.path.dirname(__file__) -vocab_dir = this_file_dir - -ggml_ftype_str_to_int = { - "f32": 0, - "f16": 1, - "q4_0": 2, - "q4_1": 3, - "q5_0": 8, - "q5_1": 9, - "q8_0": 7 -} - -ggml_ttype_str_to_int = { - "f32": 0, - "f16": 1, - "q4_0": 2, - "q4_1": 3, - "q5_0": 6, - "q5_1": 7, - "q8_0": 8 -} - -QK4_0 = 32 -def quantize_q4_0(x): - assert x.shape[-1] % QK4_0 == 0 - x = x.reshape(-1, QK4_0) - max = np.take_along_axis(x, np.argmax(np.abs(x), axis=-1)[:, np.newaxis], axis=-1) - d = max / -8 - qs = ((x / d) + 8).round().clip(min=0, max=15).astype(np.int8) - half = QK4_0 // 2 - qs = qs[:, :half] | (qs[:, half:] << 4) - d = d.astype(np.float16).view(np.int8) - y = np.concatenate((d, qs), axis=-1) - return y - -QK4_1 = 32 -def quantize_q4_1(x): - assert x.shape[-1] % QK4_1 == 0 - x = x.reshape(-1, QK4_1) - min = np.min(x, axis=-1, keepdims=True) - max = np.max(x, axis=-1, keepdims=True) - d = (max - min) / ((1 << 4) - 1) - qs = ((x - min) / d).round().clip(min=0, max=15).astype(np.int8) - half = QK4_1 // 2 - qs = qs[:, :half] | (qs[:, half:] << 4) - d = d.astype(np.float16).view(np.int8) - m = min.astype(np.float16).view(np.int8) - y = np.concatenate((d, m, qs), axis=-1) - return y - -QK5_0 = 32 -def quantize_q5_0(x): - assert x.shape[1] % QK5_0 == 0 - x = x.reshape(-1, QK5_0) - max = np.take_along_axis(x, np.argmax(np.abs(x), axis=-1)[:, np.newaxis], axis=-1) - d = max / 
-16 - xi = ((x / d) + 16).round().clip(min=0, max=31).astype(np.int8) - half = QK5_0 // 2 - qs = (xi[:, :half] & 0x0F) | (xi[:, half:] << 4) - qh = np.zeros(qs.shape[:-1], dtype=np.int32) - for i in range(QK5_0): - qh |= ((xi[:, i] & 0x10) >> 4).astype(np.int32) << i - d = d.astype(np.float16).view(np.int8) - qh = qh[..., np.newaxis].view(np.int8) - y = np.concatenate((d, qh, qs), axis=-1) - return y - -QK5_1 = 32 -def quantize_q5_1(x): - assert x.shape[-1] % QK5_1 == 0 - x = x.reshape(-1, QK5_1) - min = np.min(x, axis=-1, keepdims=True) - max = np.max(x, axis=-1, keepdims=True) - d = (max - min) / ((1 << 5) - 1) - xi = ((x - min) / d).round().clip(min=0, max=31).astype(np.int8) - half = QK5_1//2 - qs = (xi[:, :half] & 0x0F) | (xi[:, half:] << 4) - qh = np.zeros(xi.shape[:-1], dtype=np.int32) - for i in range(QK5_1): - qh |= ((xi[:, i] & 0x10) >> 4).astype(np.int32) << i - d = d.astype(np.float16).view(np.int8) - m = min.astype(np.float16).view(np.int8) - qh = qh[..., np.newaxis].view(np.int8) - ndarray = np.concatenate((d, m, qh, qs), axis=-1) - return ndarray - -QK8_0 = 32 -def quantize_q8_0(x): - assert x.shape[-1] % QK8_0 == 0 - x = x.reshape(-1, QK8_0) - amax = np.max(np.abs(x), axis=-1, keepdims=True) - d = amax / ((1 << 7) - 1) - qs = (x / d).round().clip(min=-128, max=127).astype(np.int8) - d = d.astype(np.float16).view(np.int8) - y = np.concatenate((d, qs), axis=-1) - return y - -# copy from https://github.com/openai/CLIP/blob/main/clip/simple_tokenizer.py#L16 -def bytes_to_unicode(): - """ - Returns list of utf-8 byte and a corresponding list of unicode strings. - The reversible bpe codes work on unicode strings. - This means you need a large # of unicode characters in your vocab if you want to avoid UNKs. - When you're at something like a 10B token dataset you end up needing around 5K for decent coverage. - This is a significant percentage of your normal, say, 32K bpe vocab. - To avoid that, we want lookup tables between utf-8 bytes and unicode strings. 
- And avoids mapping to whitespace/control characters the bpe code barfs on. - """ - bs = list(range(ord("!"), ord("~")+1))+list(range(ord("Ā”"), ord("¬")+1))+list(range(ord("Ā®"), ord("Ćæ")+1)) - cs = bs[:] - n = 0 - for b in range(2**8): - if b not in bs: - bs.append(b) - cs.append(2**8+n) - n += 1 - cs = [chr(n) for n in cs] - return dict(zip(bs, cs)) - -def load_model_from_file(model_path): - print("loading model from {}".format(model_path)) - if model_path.lower().endswith(".safetensors"): - pl_sd = safetensors.torch.load_file(model_path, device="cpu") - else: - pl_sd = torch.load(model_path, map_location="cpu") - state_dict = pl_sd["state_dict"] if "state_dict" in pl_sd else pl_sd - print("loading model from {} completed".format(model_path)) - return state_dict - -def get_alpha_comprod(linear_start=0.00085, linear_end=0.0120, timesteps=1000): - betas = torch.linspace(linear_start ** 0.5, linear_end ** 0.5, timesteps, dtype=torch.float32) ** 2 - alphas = 1. - betas - alphas_cumprod = np.cumprod(alphas.numpy(), axis=0) - return torch.tensor(alphas_cumprod) - -unused_tensors = [ - "betas", - "alphas_cumprod_prev", - "sqrt_alphas_cumprod", - "sqrt_one_minus_alphas_cumprod", - "log_one_minus_alphas_cumprod", - "sqrt_recip_alphas_cumprod", - "sqrt_recipm1_alphas_cumprod", - "posterior_variance", - "posterior_log_variance_clipped", - "posterior_mean_coef1", - "posterior_mean_coef2", - "cond_stage_model.transformer.text_model.embeddings.position_ids", - "model_ema.decay", - "model_ema.num_updates" -] - -def convert(model_path, out_type = None, out_file=None): - # load model - with open(os.path.join(vocab_dir, "vocab.json"), encoding="utf-8") as f: - clip_vocab = json.load(f) - - state_dict = load_model_from_file(model_path) - alphas_cumprod = state_dict.get("alphas_cumprod") - if alphas_cumprod != None: - # print((np.abs(get_alpha_comprod().numpy() - alphas_cumprod.numpy()) < 0.000001).all()) - pass - else: - print("no alphas_cumprod in file, generate new one") - 
alphas_cumprod = get_alpha_comprod() - state_dict["alphas_cumprod"] = alphas_cumprod - - - # output option - if out_type == None: - weight = state_dict["cond_stage_model.transformer.text_model.encoder.layers.0.self_attn.k_proj.weight"].numpy() - if weight.dtype == np.float32: - out_type = "f32" - elif weight.dtype == np.float16: - out_type = "f16" - if out_file == None: - out_file = os.path.splitext(os.path.basename(model_path))[0] + f"-ggml-model-{out_type}.bin" - out_file = os.path.join(os.getcwd(), out_file) - print(f"Saving GGML compatible file to {out_file}") - - # convert and save - with open(out_file, "wb") as file: - # magic: ggml in hex - file.write(struct.pack("i", 0x67676D6C)) - # out type - file.write(struct.pack("i", ggml_ftype_str_to_int[out_type])) - - # vocab - byte_encoder = bytes_to_unicode() - byte_decoder = {v: k for k, v in byte_encoder.items()} - file.write(struct.pack("i", len(clip_vocab))) - for key in clip_vocab: - text = bytearray([byte_decoder[c] for c in key]) - file.write(struct.pack("i", len(text))) - file.write(text) - - # weights - for name in state_dict.keys(): - if not isinstance(state_dict[name], torch.Tensor): - continue - if name in unused_tensors: - continue - data = state_dict[name].numpy() - - n_dims = len(data.shape) - shape = data.shape - old_type = data.dtype - - ttype = "f32" - if n_dims == 4: - data = data.astype(np.float16) - ttype = "f16" - elif n_dims == 2 and name[-7:] == ".weight": - if out_type == "f32": - data = data.astype(np.float32) - elif out_type == "f16": - data = data.astype(np.float16) - elif out_type == "q4_0": - data = quantize_q4_0(data) - elif out_type == "q4_1": - data = quantize_q4_1(data) - elif out_type == "q5_0": - data = quantize_q5_0(data) - elif out_type == "q5_1": - data = quantize_q5_1(data) - elif out_type == "q8_0": - data = quantize_q8_0(data) - else: - raise Exception("invalid out_type {}".format(out_type)) - ttype = out_type - else: - data = data.astype(np.float32) - ttype = "f32" - - 
print("Processing tensor: {} with shape {}, {} -> {}".format(name, data.shape, old_type, ttype)) - - # header - name_bytes = name.encode("utf-8") - file.write(struct.pack("iii", n_dims, len(name_bytes), ggml_ttype_str_to_int[ttype])) - for i in range(n_dims): - file.write(struct.pack("i", shape[n_dims - 1 - i])) - file.write(name_bytes) - # data - data.tofile(file) - print("Convert done") - print(f"Saved GGML compatible file to {out_file}") - -if __name__ == "__main__": - import argparse - parser = argparse.ArgumentParser(description="Convert Stable Diffuison model to GGML compatible file format") - parser.add_argument("--out_type", choices=["f32", "f16", "q4_0", "q4_1", "q5_0", "q5_1", "q8_0"], help="output format (default: based on input)") - parser.add_argument("--out_file", help="path to write to; default: based on input and current working directory") - parser.add_argument("model_path", help="model file path (*.pth, *.pt, *.ckpt, *.safetensors)") - args = parser.parse_args() - convert(args.model_path, args.out_type, args.out_file) diff --git a/models/requirements.txt b/models/requirements.txt deleted file mode 100644 index ecd3ed65d..000000000 --- a/models/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -numpy -torch -safetensors -pytorch_lightning \ No newline at end of file diff --git a/models/vocab.json b/models/vocab.json deleted file mode 100644 index 4297ea6a8..000000000 --- a/models/vocab.json +++ /dev/null @@ -1 +0,0 @@ -{"!": 0, "\"": 1, "#": 2, "$": 3, "%": 4, "&": 5, "'": 6, "(": 7, ")": 8, "*": 9, "+": 10, ",": 11, "-": 12, ".": 13, "/": 14, "0": 15, "1": 16, "2": 17, "3": 18, "4": 19, "5": 20, "6": 21, "7": 22, "8": 23, "9": 24, ":": 25, ";": 26, "<": 27, "=": 28, ">": 29, "?": 30, "@": 31, "A": 32, "B": 33, "C": 34, "D": 35, "E": 36, "F": 37, "G": 38, "H": 39, "I": 40, "J": 41, "K": 42, "L": 43, "M": 44, "N": 45, "O": 46, "P": 47, "Q": 48, "R": 49, "S": 50, "T": 51, "U": 52, "V": 53, "W": 54, "X": 55, "Y": 56, "Z": 57, "[": 58, "\\": 59, "]": 60, 
"^": 61, "_": 62, "`": 63, "a": 64, "b": 65, "c": 66, "d": 67, "e": 68, "f": 69, "g": 70, "h": 71, "i": 72, "j": 73, "k": 74, "l": 75, "m": 76, "n": 77, "o": 78, "p": 79, "q": 80, "r": 81, "s": 82, "t": 83, "u": 84, "v": 85, "w": 86, "x": 87, "y": 88, "z": 89, "{": 90, "|": 91, "}": 92, "~": 93, "Ā”": 94, "Ā¢": 95, "Ā£": 96, "¤": 97, "Ā„": 98, "¦": 99, "§": 100, "ĀØ": 101, "Ā©": 102, "ĀŖ": 103, "Ā«": 104, "¬": 105, "Ā®": 106, "ĀÆ": 107, "°": 108, "±": 109, "²": 110, "³": 111, "Ā“": 112, "µ": 113, "¶": 114, "Ā·": 115, "Āø": 116, "¹": 117, "Āŗ": 118, "Ā»": 119, "¼": 120, "½": 121, "¾": 122, "Āæ": 123, "ƀ": 124, "Ɓ": 125, "Ƃ": 126, "ƃ": 127, "Ƅ": 128, "ƅ": 129, "Ɔ": 130, "Ƈ": 131, "ƈ": 132, "Ɖ": 133, "Ê": 134, "Ƌ": 135, "Ì": 136, "ƍ": 137, "Ǝ": 138, "Ə": 139, "Ɛ": 140, "Ƒ": 141, "ƒ": 142, "Ɠ": 143, "Ɣ": 144, "ƕ": 145, "Ɩ": 146, "Ɨ": 147, "Ƙ": 148, "ƙ": 149, "Ú": 150, "ƛ": 151, "Ü": 152, "Ɲ": 153, "ƞ": 154, "ß": 155, "Ć ": 156, "Ć”": 157, "Ć¢": 158, "Ć£": 159, "Ƥ": 160, "Ć„": 161, "Ʀ": 162, "Ƨ": 163, "ĆØ": 164, "Ć©": 165, "ĆŖ": 166, "Ć«": 167, "Ƭ": 168, "Ć­": 169, "Ć®": 170, "ĆÆ": 171, "ư": 172, "Ʊ": 173, "ò": 174, "ó": 175, "Ć“": 176, "Ƶ": 177, "ƶ": 178, "Ć·": 179, "Ćø": 180, "ù": 181, "Ćŗ": 182, "Ć»": 183, "ü": 184, "ý": 185, "þ": 186, "Ćæ": 187, "Ā": 188, "ā": 189, "Ă": 190, "ă": 191, "Ą": 192, "ą": 193, "Ć": 194, "ć": 195, "Ĉ": 196, "ĉ": 197, "Ċ": 198, "ċ": 199, "Č": 200, "č": 201, "Ď": 202, "ď": 203, "Đ": 204, "đ": 205, "Ē": 206, "ē": 207, "Ĕ": 208, "ĕ": 209, "Ė": 210, "ė": 211, "Ę": 212, "ę": 213, "Ě": 214, "ě": 215, "Ĝ": 216, "ĝ": 217, "Ğ": 218, "ğ": 219, "Ä ": 220, "Ä”": 221, "Ä¢": 222, "Ä£": 223, "Ĥ": 224, "Ä„": 225, "Ħ": 226, "ħ": 227, "ÄØ": 228, "Ä©": 229, "ÄŖ": 230, "Ä«": 231, "Ĭ": 232, "Ä­": 233, "Ä®": 234, "ÄÆ": 235, "İ": 236, "ı": 237, "IJ": 238, "ij": 239, "Ä“": 240, "ĵ": 241, "Ķ": 242, "Ä·": 243, "Äø": 244, "Ĺ": 245, "Äŗ": 246, "Ä»": 247, "ļ": 248, "Ľ": 249, "ľ": 250, "Äæ": 251, "ŀ": 252, "Ł": 253, "ł": 254, "Ń": 255, "!": 256, "\"": 257, "#": 258, 
"$": 259, "%": 260, "&": 261, "'": 262, "(": 263, ")": 264, "*": 265, "+": 266, ",": 267, "-": 268, ".": 269, "/": 270, "0": 271, "1": 272, "2": 273, "3": 274, "4": 275, "5": 276, "6": 277, "7": 278, "8": 279, "9": 280, ":": 281, ";": 282, "<": 283, "=": 284, ">": 285, "?": 286, "@": 287, "A": 288, "B": 289, "C": 290, "D": 291, "E": 292, "F": 293, "G": 294, "H": 295, "I": 296, "J": 297, "K": 298, "L": 299, "M": 300, "N": 301, "O": 302, "P": 303, "Q": 304, "R": 305, "S": 306, "T": 307, "U": 308, "V": 309, "W": 310, "X": 311, "Y": 312, "Z": 313, "[": 314, "\\": 315, "]": 316, "^": 317, "_": 318, "`": 319, "a": 320, "b": 321, "c": 322, "d": 323, "e": 324, "f": 325, "g": 326, "h": 327, "i": 328, "j": 329, "k": 330, "l": 331, "m": 332, "n": 333, "o": 334, "p": 335, "q": 336, "r": 337, "s": 338, "t": 339, "u": 340, "v": 341, "w": 342, "x": 343, "y": 344, "z": 345, "{": 346, "|": 347, "}": 348, "~": 349, "Ā”": 350, "Ā¢": 351, "Ā£": 352, "¤": 353, "Ā„": 354, "¦": 355, "§": 356, "ĀØ": 357, "Ā©": 358, "ĀŖ": 359, "Ā«": 360, "¬": 361, "Ā®": 362, "ĀÆ": 363, "°": 364, "±": 365, "²": 366, "³": 367, "Ā“": 368, "µ": 369, "¶": 370, "Ā·": 371, "Āø": 372, "¹": 373, "Āŗ": 374, "Ā»": 375, "¼": 376, "½": 377, "¾": 378, "Āæ": 379, "ƀ": 380, "Ɓ": 381, "Ƃ": 382, "ƃ": 383, "Ƅ": 384, "ƅ": 385, "Ɔ": 386, "Ƈ": 387, "ƈ": 388, "Ɖ": 389, "Ê": 390, "Ƌ": 391, "Ì": 392, "ƍ": 393, "Ǝ": 394, "Ə": 395, "Ɛ": 396, "Ƒ": 397, "ƒ": 398, "Ɠ": 399, "Ɣ": 400, "ƕ": 401, "Ɩ": 402, "Ɨ": 403, "Ƙ": 404, "ƙ": 405, "Ú": 406, "ƛ": 407, "Ü": 408, "Ɲ": 409, "ƞ": 410, "ß": 411, "Ć ": 412, "Ć”": 413, "Ć¢": 414, "Ć£": 415, "Ƥ": 416, "Ć„": 417, "Ʀ": 418, "Ƨ": 419, "ĆØ": 420, "Ć©": 421, "ĆŖ": 422, "Ć«": 423, "Ƭ": 424, "Ć­": 425, "Ć®": 426, "ĆÆ": 427, "ư": 428, "Ʊ": 429, "ò": 430, "ó": 431, "Ć“": 432, "Ƶ": 433, "ƶ": 434, "Ć·": 435, "Ćø": 436, "ù": 437, "Ćŗ": 438, "Ć»": 439, "ü": 440, "ý": 441, "þ": 442, "Ćæ": 443, "Ā": 444, "ā": 445, "Ă": 446, "ă": 447, "Ą": 448, "ą": 449, "Ć": 450, "ć": 451, "Ĉ": 452, "ĉ": 453, "Ċ": 454, "ċ": 
455, "Č": 456, "č": 457, "Ď": 458, "ď": 459, "Đ": 460, "đ": 461, "Ē": 462, "ē": 463, "Ĕ": 464, "ĕ": 465, "Ė": 466, "ė": 467, "Ę": 468, "ę": 469, "Ě": 470, "ě": 471, "Ĝ": 472, "ĝ": 473, "Ğ": 474, "ğ": 475, "Ä ": 476, "Ä”": 477, "Ä¢": 478, "Ä£": 479, "Ĥ": 480, "Ä„": 481, "Ħ": 482, "ħ": 483, "ÄØ": 484, "Ä©": 485, "ÄŖ": 486, "Ä«": 487, "Ĭ": 488, "Ä­": 489, "Ä®": 490, "ÄÆ": 491, "İ": 492, "ı": 493, "IJ": 494, "ij": 495, "Ä“": 496, "ĵ": 497, "Ķ": 498, "Ä·": 499, "Äø": 500, "Ĺ": 501, "Äŗ": 502, "Ä»": 503, "ļ": 504, "Ľ": 505, "ľ": 506, "Äæ": 507, "ŀ": 508, "Ł": 509, "ł": 510, "Ń": 511, "in": 512, "th": 513, "an": 514, "re": 515, "ar": 516, "er": 517, "the": 518, "ing": 519, "ou": 520, "on": 521, "st": 522, "or": 523, "en": 524, "on": 525, "al": 526, "at": 527, "er": 528, "it": 529, "in": 530, "to": 531, "ro": 532, "is": 533, "le": 534, "ic": 535, "at": 536, "and": 537, "ed": 538, "of": 539, "ch": 540, "or": 541, "es": 542, "il": 543, "el": 544, "st": 545, "ac": 546, "om": 547, "am": 548, "lo": 549, "an": 550, "ay": 551, "sh": 552, "ri": 553, "li": 554, "ti": 555, "for": 556, "ne": 557, "ðŁ": 558, "ra": 559, "ha": 560, "de": 561, "ol": 562, "ve": 563, "si": 564, "ur": 565, "al": 566, "se": 567, "'s": 568, "un": 569, "di": 570, "be": 571, "la": 572, "wh": 573, "oo": 574, "day": 575, "en": 576, "ma": 577, "no": 578, "le": 579, "to": 580, "our": 581, "ir": 582, "gh": 583, "wit": 584, "it": 585, "yo": 586, "as": 587, "sp": 588, "this": 589, "ts": 590, "ati": 591, "you": 592, "with": 593, "ad": 594, "is": 595, "ab": 596, "ly": 597, "we": 598, "the": 599, "te": 600, "as": 601, "ag": 602, "vi": 603, "pp": 604, "su": 605, "ho": 606, "my": 607, "..": 608, "bu": 609, "com": 610, "se": 611, "ers": 612, "me": 613, "me": 614, "all": 615, "con": 616, "mo": 617, "ke": 618, "ge": 619, "out": 620, "ent": 621, "co": 622, "fe": 623, "ver": 624, "ar": 625, "fro": 626, "au": 627, "po": 628, "ce": 629, "ght": 630, "are": 631, "ss": 632, "from": 633, "ch": 634, "tr": 635, "oun": 636, "one": 637, 
"by": 638, "do": 639, "th": 640, "wor": 641, "ere": 642, "ke": 643, "pro": 644, "for": 645, "ds": 646, "bo": 647, "ta": 648, "we": 649, "go": 650, "he": 651, "ter": 652, "ing": 653, "de": 654, "be": 655, "ation": 656, "mor": 657, "ay": 658, "ex": 659, "ill": 660, "pe": 661, "ks": 662, "sc": 663, "lu": 664, "fu": 665, "qu": 666, "ver": 667, "ðŁĺ": 668, "ju": 669, "mu": 670, "ate": 671, "and": 672, "ve": 673, "king": 674, "mar": 675, "op": 676, "hi": 677, "...": 678, "pre": 679, "ad": 680, "ru": 681, "that": 682, "jo": 683, "of": 684, "ce": 685, "new": 686, "am": 687, "ap": 688, "gre": 689, "ss": 690, "du": 691, "now": 692, "ye": 693, "ting": 694, "your": 695, "ity": 696, "ni": 697, "ci": 698, "par": 699, "gu": 700, "fi": 701, "af": 702, "per": 703, "ter": 704, "up": 705, "so": 706, "gi": 707, "ons": 708, "gr": 709, "ge": 710, "br": 711, "pl": 712, "'t": 713, "mi": 714, "ine": 715, "wee": 716, "bi": 717, "us": 718, "sho": 719, "have": 720, "today": 721, "av": 722, "man": 723, "ent": 724, "ack": 725, "ure": 726, "our": 727, "âĢ": 728, "cu": 729, "ld": 730, "loo": 731, "im": 732, "ice": 733, "som": 734, "fin": 735, "red": 736, "ren": 737, "ood": 738, "was": 739, "tion": 740, "pi": 741, "ir": 742, "ther": 743, "ty": 744, "ph": 745, "ard": 746, "ec": 747, "!!": 748, "mon": 749, "more": 750, "will": 751, "tra": 752, "can": 753, "col": 754, "pu": 755, "te": 756, "wn": 757, "mb": 758, "so": 759, "iti": 760, "just": 761, "ning": 762, "here": 763, "tu": 764, "pa": 765, "pr": 766, "but": 767, "what": 768, "ally": 769, "fir": 770, "min": 771, "ca": 772, "ant": 773, "sa": 774, "ted": 775, "ev": 776, "ment": 777, "fa": 778, "get": 779, "ame": 780, "about": 781, "gra": 782, "not": 783, "happ": 784, "ays": 785, "man": 786, "his": 787, "time": 788, "like": 789, "gh": 790, "has": 791, "than": 792, "love": 793, "art": 794, "ste": 795, "ding": 796, "he": 797, "cre": 798, "ws": 799, "wat": 800, "der": 801, "ite": 802, "ser": 803, "ace": 804, "age": 805, "end": 806, "str": 807, "aw": 
808, "stor": 809, "re": 810, "car": 811, "ell": 812, "all": 813, "ps": 814, "fri": 815, "pho": 816, "por": 817, "do": 818, "ak": 819, "wi": 820, "fre": 821, "who": 822, "shi": 823, "boo": 824, "son": 825, "ell": 826, "when": 827, "ill": 828, "how": 829, "great": 830, "win": 831, "el": 832, "bl": 833, "ssi": 834, "ali": 835, "some": 836, "ðŁē": 837, "ton": 838, "der": 839, "les": 840, "pla": 841, "ĆÆĀø": 842, "ed": 843, "sch": 844, "hu": 845, "ong": 846, "don": 847, "ki": 848, "sh": 849, "ann": 850, "cor": 851, "..": 852, "ound": 853, "az": 854, "ine": 855, "ary": 856, "ful": 857, "stu": 858, "ould": 859, "sti": 860, "go": 861, "see": 862, "able": 863, "ars": 864, "ll": 865, "mis": 866, "ber": 867, "ck": 868, "wa": 869, "ents": 870, "no": 871, "sig": 872, "fe": 873, "first": 874, "et": 875, "spe": 876, "ack": 877, "if": 878, "ous": 879, "'m": 880, "ster": 881, "app": 882, "ang": 883, "ance": 884, "ans": 885, "good": 886, "bre": 887, "ever": 888, "they": 889, "tic": 890, "come": 891, "off": 892, "back": 893, "ase": 894, "ings": 895, "old": 896, "ight": 897, "fo": 898, "her": 899, "happy": 900, "pic": 901, "its": 902, "ving": 903, "us": 904, "mat": 905, "hom": 906, "dy": 907, "em": 908, "sk": 909, "ying": 910, "their": 911, "led": 912, "ry": 913, "ul": 914, "har": 915, "ck": 916, "ton": 917, "onal": 918, "hel": 919, "ric": 920, "bir": 921, "vie": 922, "way": 923, "tri": 924, "da": 925, "ple": 926, "bro": 927, "sto": 928, "ool": 929, "night": 930, "tru": 931, "ba": 932, "read": 933, "res": 934, "year": 935, "fr": 936, "tor": 937, "als": 938, "coun": 939, "cla": 940, "ture": 941, "vel": 942, "ated": 943, "lec": 944, "end": 945, "thing": 946, "vo": 947, "ici": 948, "best": 949, "can": 950, "work": 951, "last": 952, "after": 953, "ence": 954, "pri": 955, "pe": 956, "es": 957, "il": 958, "â̦": 959, "dre": 960, "ys": 961, "over": 962, "ies": 963, "ðŁij": 964, "comm": 965, "tw": 966, "ink": 967, "sun": 968, "cl": 969, "life": 970, "tt": 971, "ach": 972, "land": 973, "sy": 
974, "tre": 975, "tal": 976, "pol": 977, "sm": 978, "duc": 979, "sal": 980, "ft": 981, "'re": 982, "che": 983, "war": 984, "tur": 985, "ations": 986, "ach": 987, "ms": 988, "ile": 989, "pm": 990, "ough": 991, "ate": 992, "star": 993, "week": 994, "!!!": 995, "clu": 996, "there": 997, "ner": 998, "tom": 999, "sel": 1000, "ï¸ı": 1001, "world": 1002, "ves": 1003, "cam": 1004, "got": 1005, "inter": 1006, "off": 1007, "um": 1008, "tonight": 1009, "other": 1010, "hou": 1011, "look": 1012, "je": 1013, "id": 1014, "sion": 1015, "beau": 1016, "att": 1017, "eli": 1018, "ort": 1019, "rec": 1020, "ff": 1021, "ster": 1022, "supp": 1023, "gen": 1024, "been": 1025, "ily": 1026, "team": 1027, "mm": 1028, "ic": 1029, "peop": 1030, "itt": 1031, "ats": 1032, "only": 1033, "mber": 1034, "eng": 1035, "bri": 1036, "mp": 1037, "know": 1038, "bur": 1039, "bar": 1040, "ins": 1041, "low": 1042, "she": 1043, "row": 1044, "âĿ": 1045, "tro": 1046, "people": 1047, "via": 1048, "low": 1049, "aga": 1050, "bet": 1051, "xt": 1052, "fac": 1053, "char": 1054, "ear": 1055, "wal": 1056, "sen": 1057, "fam": 1058, "ble": 1059, "nati": 1060, "ish": 1061, "nor": 1062, "game": 1063, "live": 1064, "sco": 1065, "ley": 1066, "don": 1067, "ick": 1068, "ball": 1069, "very": 1070, "these": 1071, "pan": 1072, "ia": 1073, "ating": 1074, "cr": 1075, "are": 1076, "gir": 1077, "make": 1078, "stre": 1079, "show": 1080, ".\"": 1081, "fl": 1082, "up": 1083, "dr": 1084, "thanks": 1085, "illi": 1086, "wom": 1087, "sts": 1088, "ig": 1089, "sur": 1090, "every": 1091, "cur": 1092, "view": 1093, "let": 1094, "into": 1095, "most": 1096, "na": 1097, "indi": 1098, "gar": 1099, "had": 1100, "sou": 1101, "ved": 1102, "ant": 1103, "ition": 1104, "made": 1105, "fol": 1106, "uni": 1107, "ited": 1108, "ðŁı": 1109, "ical": 1110, "thr": 1111, "ready": 1112, "chec": 1113, "dra": 1114, "kes": 1115, "book": 1116, "ep": 1117, "sic": 1118, "morning": 1119, "news": 1120, "cau": 1121, "ct": 1122, "well": 1123, "anc": 1124, "photo": 1125, 
"than": 1126, "ors": 1127, "birth": 1128, "gg": 1129, "out": 1130, "next": 1131, "some": 1132, "ening": 1133, "story": 1134, "chri": 1135, "down": 1136, "home": 1137, "ffe": 1138, "free": 1139, "da": 1140, "bor": 1141, "fil": 1142, "cial": 1143, "thank": 1144, "side": 1145, "lear": 1146, "que": 1147, "line": 1148, "ten": 1149, "ates": 1150, "years": 1151, "my": 1152, "photo": 1153, "beauti": 1154, "right": 1155, "nu": 1156, "form": 1157, "ship": 1158, "ban": 1159, "ther": 1160, "days": 1161, "gam": 1162, "ason": 1163, "gy": 1164, "ðŁİ": 1165, "birthday": 1166, "set": 1167, "ick": 1168, "et": 1169, "still": 1170, "coming": 1171, "take": 1172, "ðŁĩ": 1173, "bb": 1174, "sol": 1175, "son": 1176, "den": 1177, "ep": 1178, "music": 1179, "them": 1180, "den": 1181, "why": 1182, "foo": 1183, "cra": 1184, "amaz": 1185, "wn": 1186, "hol": 1187, "tting": 1188, "wr": 1189, "ue": 1190, "mag": 1191, "cro": 1192, "lan": 1193, "clo": 1194, "bra": 1195, "ak": 1196, "sing": 1197, "cal": 1198, "read": 1199, "'ve": 1200, "joh": 1201, "bab": 1202, "dri": 1203, "blo": 1204, "big": 1205, "eric": 1206, "int": 1207, "tor": 1208, "try": 1209, "la": 1210, "leg": 1211, "house": 1212, "mic": 1213, "val": 1214, "beautiful": 1215, "litt": 1216, "check": 1217, "new": 1218, "vers": 1219, "sw": 1220, "ari": 1221, "play": 1222, "her": 1223, "âĢĵ": 1224, "win": 1225, "ma": 1226, "congr": 1227, "school": 1228, "fun": 1229, ".@": 1230, "heal": 1231, "ich": 1232, "del": 1233, "where": 1234, "lon": 1235, "ket": 1236, "two": 1237, "much": 1238, "watch": 1239, "ven": 1240, "ded": 1241, "ast": 1242, "ked": 1243, "bas": 1244, "going": 1245, "mp": 1246, "ever": 1247, "ways": 1248, "roo": 1249, "desig": 1250, "ly": 1251, "sed": 1252, "top": 1253, "lin": 1254, "chan": 1255, "too": 1256, "iting": 1257, "dent": 1258, "ghts": 1259, "ty": 1260, "spo": 1261, "need": 1262, "blu": 1263, "inst": 1264, "being": 1265, "âĿ¤": 1266, "wel": 1267, "ls": 1268, "him": 1269, "may": 1270, "sting": 1271, "na": 1272, "ely": 1273, 
"little": 1274, "ga": 1275, "nat": 1276, "tomor": 1277, "mc": 1278, "hon": 1279, "want": 1280, "air": 1281, "pic": 1282, "americ": 1283, "per": 1284, "less": 1285, "week": 1286, "vel": 1287, "ah": 1288, "cap": 1289, "cham": 1290, "ger": 1291, "tim": 1292, "tomorrow": 1293, "ness": 1294, "state": 1295, "hal": 1296, "serv": 1297, "ze": 1298, "os": 1299, "pat": 1300, "vis": 1301, "exc": 1302, "sin": 1303, "ff": 1304, "city": 1305, "cen": 1306, "any": 1307, "bel": 1308, "summ": 1309, "tin": 1310, "would": 1311, "looking": 1312, "ko": 1313, "cele": 1314, "family": 1315, "mer": 1316, "pow": 1317, "help": 1318, "bus": 1319, "co": 1320, "cle": 1321, "self": 1322, "ens": 1323, "ics": 1324, "tho": 1325, "ani": 1326, "cho": 1327, "lead": 1328, "bs": 1329, "twee": 1330, "think": 1331, "fore": 1332, "chil": 1333, "vide": 1334, "did": 1335, "ale": 1336, "chi": 1337, "vil": 1338, "ends": 1339, "wing": 1340, "pas": 1341, "'ll": 1342, "vol": 1343, "sa": 1344, "gs": 1345, "many": 1346, "jec": 1347, "before": 1348, "graph": 1349, "ny": 1350, "uring": 1351, "wil": 1352, "dd": 1353, "buil": 1354, "fav": 1355, "sted": 1356, "tran": 1357, "ling": 1358, "oud": 1359, "dge": 1360, "fiel": 1361, "national": 1362, "sta": 1363, "cer": 1364, "were": 1365, "ina": 1366, "season": 1367, "cou": 1368, "ned": 1369, "amazing": 1370, "tions": 1371, "celebr": 1372, "ns": 1373, "ath": 1374, "head": 1375, "sday": 1376, "dar": 1377, "loc": 1378, "vin": 1379, "another": 1380, "goo": 1381, "sat": 1382, "ny": 1383, "join": 1384, "pres": 1385, "ses": 1386, "sing": 1387, "ana": 1388, "ining": 1389, "....": 1390, "cour": 1391, "ï¸ı": 1392, "act": 1393, "cause": 1394, "light": 1395, "ams": 1396, "ta": 1397, "bal": 1398, "fc": 1399, "high": 1400, "offici": 1401, "tt": 1402, "christ": 1403, "dic": 1404, "day": 1405, "ral": 1406, "hor": 1407, ":)": 1408, "visi": 1409, "nam": 1410, "ob": 1411, "mas": 1412, "ght": 1413, "really": 1414, "tun": 1415, "find": 1416, "through": 1417, "port": 1418, "ut": 1419, "tive": 1420, 
"sty": 1421, "ne": 1422, "ore": 1423, "ðŁĺĤ": 1424, "support": 1425, "never": 1426, "even": 1427, "ðŁĶ": 1428, "ha": 1429, "ya": 1430, "ld": 1431, "uk": 1432, "ran": 1433, "jam": 1434, "with": 1435, "medi": 1436, "des": 1437, "ney": 1438, "ching": 1439, "ale": 1440, "hy": 1441, "kin": 1442, "!!": 1443, "dy": 1444, "place": 1445, "also": 1446, "ble": 1447, "which": 1448, "black": 1449, "bli": 1450, "say": 1451, "park": 1452, "play": 1453, "ire": 1454, "video": 1455, "weekend": 1456, "ail": 1457, "key": 1458, "pt": 1459, "ward": 1460, "friday": 1461, "din": 1462, "iness": 1463, "gro": 1464, "ben": 1465, "always": 1466, "tball": 1467, "ago": 1468, "mil": 1469, "cy": 1470, "produc": 1471, "disc": 1472, "under": 1473, "please": 1474, "spor": 1475, "full": 1476, "ey": 1477, "ðŁĻ": 1478, "ise": 1479, "ities": 1480, "cat": 1481, "kno": 1482, "use": 1483, "fore": 1484, "ker": 1485, "art": 1486, "high": 1487, "open": 1488, "san": 1489, "ef": 1490, "ours": 1491, "shed": 1492, "stri": 1493, "dro": 1494, "again": 1495, "im": 1496, "ðŁĵ": 1497, "enjo": 1498, "fun": 1499, "getting": 1500, "pen": 1501, "ger": 1502, "cli": 1503, "any": 1504, "every": 1505, "eu": 1506, "women": 1507, "âľ": 1508, "est": 1509, "could": 1510, "ry": 1511, "\"@": 1512, "thou": 1513, "sha": 1514, "commun": 1515, "ber": 1516, "dents": 1517, "dis": 1518, "while": 1519, "away": 1520, "dio": 1521, "ham": 1522, "gla": 1523, "date": 1524, "ka": 1525, "miss": 1526, "unch": 1527, "won": 1528, "inf": 1529, "room": 1530, "ga": 1531, "real": 1532, "exper": 1533, "direc": 1534, "should": 1535, "spr": 1536, "gol": 1537, "long": 1538, "better": 1539, "ori": 1540, "ey": 1541, "ience": 1542, "ils": 1543, "zz": 1544, "han": 1545, "found": 1546, "vs": 1547, "âĻ": 1548, "post": 1549, "tic": 1550, "part": 1551, "men": 1552, "rence": 1553, "cess": 1554, "vic": 1555, "sil": 1556, "shop": 1557, "ðŁĺĤ": 1558, "food": 1559, "val": 1560, "stic": 1561, "you": 1562, "says": 1563, "elec": 1564, "star": 1565, "oc": 1566, "land": 1567, 
"id": 1568, "ction": 1569, "field": 1570, "sof": 1571, "start": 1572, "water": 1573, "friends": 1574, "ones": 1575, "ðŁĮ": 1576, "fla": 1577, "far": 1578, "white": 1579, "party": 1580, "inst": 1581, "grou": 1582, "tv": 1583, "everyone": 1584, "ment": 1585, "ja": 1586, "cha": 1587, "prin": 1588, "ants": 1589, "during": 1590, "lat": 1591, "lar": 1592, "west": 1593, "then": 1594, "ka": 1595, "youn": 1596, "insp": 1597, "inte": 1598, "ween": 1599, "visit": 1600, "against": 1601, "rele": 1602, "head": 1603, "ces": 1604, "town": 1605, "looks": 1606, "thre": 1607, "regi": 1608, "rent": 1609, "projec": 1610, "girl": 1611, "sear": 1612, "wo": 1613, "mom": 1614, "car": 1615, "hun": 1616, "publi": 1617, "di": 1618, "ple": 1619, "call": 1620, "cri": 1621, "um": 1622, "ford": 1623, "perfe": 1624, "friend": 1625, "hard": 1626, "ssion": 1627, "test": 1628, "playing": 1629, "around": 1630, "because": 1631, "kets": 1632, "meet": 1633, "satur": 1634, "arti": 1635, "work": 1636, "jun": 1637, "ven": 1638, "run": 1639, "member": 1640, "port": 1641, "super": 1642, "twit": 1643, "sam": 1644, "els": 1645, "tly": 1646, "adv": 1647, "ative": 1648, "ath": 1649, "sure": 1650, "avail": 1651, "lar": 1652, "squ": 1653, "ards": 1654, "event": 1655, "men": 1656, "ll": 1657, "over": 1658, "logy": 1659, "ital": 1660, "times": 1661, "mal": 1662, "back": 1663, "coo": 1664, "making": 1665, "stru": 1666, "âģ": 1667, "itu": 1668, "shar": 1669, "gan": 1670, "cas": 1671, "sn": 1672, "summer": 1673, "picture": 1674, "fan": 1675, "hin": 1676, "christmas": 1677, "cy": 1678, "proud": 1679, "champi": 1680, "design": 1681, "pping": 1682, "hope": 1683, "ca": 1684, "available": 1685, "may": 1686, "wed": 1687, "photograph": 1688, "special": 1689, "sale": 1690, "stop": 1691, "ery": 1692, "awe": 1693, "ality": 1694, "history": 1695, "ama": 1696, "presi": 1697, "bru": 1698, "working": 1699, "done": 1700, "dr": 1701, "ken": 1702, "feat": 1703, "wood": 1704, "atest": 1705, "sunday": 1706, "movi": 1707, "vely": 1708, 
"sle": 1709, "face": 1710, "spec": 1711, "students": 1712, "by": 1713, "ham": 1714, "spon": 1715, "business": 1716, "dat": 1717, "ie": 1718, "ip": 1719, "soci": 1720, "glo": 1721, "hand": 1722, "recor": 1723, "rs": 1724, "mee": 1725, "keep": 1726, "pur": 1727, "health": 1728, "she": 1729, "comple": 1730, "god": 1731, "davi": 1732, "collec": 1733, "list": 1734, "ra": 1735, "club": 1736, "ters": 1737, "inclu": 1738, "things": 1739, "plan": 1740, "âĺ": 1741, "john": 1742, "shing": 1743, "atul": 1744, "soon": 1745, "blue": 1746, "gor": 1747, "saturday": 1748, "won": 1749, "congratul": 1750, "see": 1751, "âĿ¤ï¸ı": 1752, "those": 1753, "ðŁĺį": 1754, "final": 1755, "dou": 1756, "ith": 1757, "own": 1758, "road": 1759, "tour": 1760, "ast": 1761, "india": 1762, "til": 1763, "nd": 1764, "fer": 1765, "favor": 1766, "sul": 1767, "learn": 1768, "fire": 1769, "just": 1770, "group": 1771, "ah": 1772, "rac": 1773, "body": 1774, "ur": 1775, "care": 1776, "Ć Āø": 1777, "plo": 1778, "oh": 1779, "pos": 1780, "give": 1781, "tech": 1782, "sub": 1783, "cent": 1784, "ering": 1785, "ym": 1786, "ility": 1787, "fic": 1788, "london": 1789, "vir": 1790, "guys": 1791, "ba": 1792, "ð٤": 1793, "baby": 1794, "scre": 1795, "ðŁĺį": 1796, "trump": 1797, "under": 1798, "change": 1799, "ian": 1800, "colle": 1801, "sses": 1802, "ler": 1803, "ssed": 1804, "nice": 1805, "announ": 1806, "power": 1807, "sar": 1808, "aking": 1809, "mini": 1810, "sli": 1811, "swee": 1812, "kar": 1813, "ful": 1814, "cru": 1815, "action": 1816, "ather": 1817, ").": 1818, "stand": 1819, "devel": 1820, "aa": 1821, "gan": 1822, "left": 1823, "lol": 1824, "rel": 1825, "trans": 1826, "ments": 1827, "int": 1828, "ef": 1829, "manag": 1830, "dig": 1831, "gener": 1832, "down": 1833, "pau": 1834, "tiv": 1835, "ku": 1836, "thur": 1837, "ken": 1838, "ston": 1839, "fans": 1840, "talk": 1841, "tweet": 1842, "too": 1843, "style": 1844, "prote": 1845, "secon": 1846, "fron": 1847, "awesome": 1848, "gl": 1849, "pal": 1850, "net": 1851, "sor": 
1852, "lau": 1853, "gon": 1854, "since": 1855, "tty": 1856, "series": 1857, "memor": 1858, "beli": 1859, "film": 1860, "did": 1861, "dies": 1862, "ot": 1863, "congratulations": 1864, "pra": 1865, "eve": 1866, "woo": 1867, "official": 1868, "suc": 1869, "incre": 1870, "bon": 1871, "part": 1872, "pped": 1873, "class": 1874, "sive": 1875, "boy": 1876, "cul": 1877, "perfect": 1878, "tou": 1879, "dam": 1880, "welcome": 1881, "football": 1882, "hi": 1883, "pap": 1884, "wait": 1885, "ada": 1886, "congrats": 1887, "young": 1888, "excited": 1889, "rece": 1890, "jan": 1891, "va": 1892, "red": 1893, "stra": 1894, "media": 1895, "'d": 1896, "does": 1897, "let": 1898, "mul": 1899, "ills": 1900, "green": 1901, "mel": 1902, "toge": 1903, "future": 1904, "yester": 1905, "versity": 1906, "form": 1907, "tain": 1908, "ide": 1909, "ches": 1910, "kids": 1911, "qui": 1912, "haha": 1913, "deta": 1914, "big": 1915, "favorite": 1916, "girls": 1917, "contin": 1918, "dom": 1919, "search": 1920, "ual": 1921, "air": 1922, "ders": 1923, "month": 1924, "cer": 1925, "yesterday": 1926, "community": 1927, "ade": 1928, "dog": 1929, "ville": 1930, "ices": 1931, "deli": 1932, "syste": 1933, "run": 1934, "ism": 1935, "heart": 1936, "cup": 1937, "enti": 1938, "few": 1939, "president": 1940, "eds": 1941, "until": 1942, "festi": 1943, "ok": 1944, "flo": 1945, "said": 1946, "ole": 1947, "med": 1948, "travel": 1949, "£": 1950, "phone": 1951, "together": 1952, "fast": 1953, "lot": 1954, "games": 1955, "shir": 1956, "between": 1957, "yes": 1958, "thers": 1959, "doing": 1960, "mac": 1961, "ator": 1962, "band": 1963, "follow": 1964, "project": 1965, "develop": 1966, "diffe": 1967, "confe": 1968, "speci": 1969, "cast": 1970, "ys": 1971, "board": 1972, "rd": 1973, "ial": 1974, "shoo": 1975, "ram": 1976, "having": 1977, "share": 1978, "follow": 1979, "one": 1980, "name": 1981, "mr": 1982, "put": 1983, "discu": 1984, "ory": 1985, "came": 1986, "ous": 1987, "site": 1988, "twitter": 1989, "tb": 1990, "tit": 1991, 
"finally": 1992, "zed": 1993, "super": 1994, "compan": 1995, "using": 1996, "alls": 1997, "list": 1998, "ris": 1999, "shot": 2000, "gal": 2001, "tar": 2002, "del": 2003, "john": 2004, "âĢĶ": 2005, "something": 2006, "ram": 2007, "intere": 2008, "whe": 2009, "bit": 2010, "ðŁį": 2011, "street": 2012, "ound": 2013, "ai": 2014, "tickets": 2015, "movie": 2016, "real": 2017, "ky": 2018, "taking": 2019, "opp": 2020, "cc": 2021, "lam": 2022, "moun": 2023, "inve": 2024, "black": 2025, "used": 2026, "online": 2027, "yor": 2028, "local": 2029, "gue": 2030, "cks": 2031, "ow": 2032, "gest": 2033, "boys": 2034, "illion": 2035, "cont": 2036, "reci": 2037, "ined": 2038, "euro": 2039, "now": 2040, "seen": 2041, "ph": 2042, "teach": 2043, "def": 2044, "south": 2045, "such": 2046, "award": 2047, "must": 2048, "issu": 2049, "care": 2050, "feel": 2051, "plu": 2052, "latest": 2053, "sports": 2054, "web": 2055, "tex": 2056, "ement": 2057, "sk": 2058, "fic": 2059, "wan": 2060, "tech": 2061, "ot": 2062, "box": 2063, "ner": 2064, "free": 2065, "tal": 2066, "ash": 2067, "case": 2068, "hot": 2069, "wonder": 2070, "meeting": 2071, "era": 2072, "chall": 2073, "ðŁIJ": 2074, "job": 2075, "ili": 2076, "cool": 2077, "jour": 2078, "ths": 2079, "mo": 2080, "fel": 2081, "die": 2082, "micha": 2083, "ele": 2084, "team": 2085, "service": 2086, "stand": 2087, "makes": 2088, "ping": 2089, "early": 2090, "comes": 2091, "ek": 2092, "holi": 2093, "vers": 2094, "ague": 2095, "sau": 2096, "three": 2097, "monday": 2098, "fashi": 2099, "someone": 2100, "thro": 2101, "sea": 2102, "bad": 2103, "suppor": 2104, "turn": 2105, "ury": 2106, "ming": 2107, "photography": 2108, "nic": 2109, "mark": 2110, "pretty": 2111, "ssing": 2112, "watching": 2113, "memb": 2114, "arri": 2115, "county": 2116, "beach": 2117, "fran": 2118, "center": 2119, "police": 2120, "bat": 2121, "public": 2122, "tan": 2123, "press": 2124, "saf": 2125, "sy": 2126, "gets": 2127, "roy": 2128, "ners": 2129, "your": 2130, "buy": 2131, "sters": 2132, 
"show": 2133, "ased": 2134, "childre": 2135, "afric": 2136, "ines": 2137, "space": 2138, "scri": 2139, "hall": 2140, "pain": 2141, "aring": 2142, "home": 2143, "mur": 2144, "health": 2145, "ched": 2146, "sand": 2147, "recei": 2148, "guy": 2149, "ea": 2150, "american": 2151, "resi": 2152, "children": 2153, "--": 2154, "iri": 2155, "ington": 2156, "country": 2157, "ross": 2158, "len": 2159, "anna": 2160, "books": 2161, "bc": 2162, "ece": 2163, "dom": 2164, "lovely": 2165, "kh": 2166, "pet": 2167, "gy": 2168, "gri": 2169, "stage": 2170, "office": 2171, "rock": 2172, "mon": 2173, "bay": 2174, "table": 2175, "sun": 2176, "med": 2177, "thin": 2178, "lor": 2179, "flow": 2180, "(@": 2181, "university": 2182, "store": 2183, "front": 2184, "good": 2185, "za": 2186, "vote": 2187, "north": 2188, "hey": 2189, "anim": 2190, "order": 2191, "mid": 2192, "without": 2193, "ade": 2194, "remember": 2195, "market": 2196, "??": 2197, "mus": 2198, "training": 2199, "educ": 2200, "but": 2201, "cover": 2202, "stan": 2203, "scen": 2204, "bla": 2205, "break": 2206, "lou": 2207, "same": 2208, "gold": 2209, "ain": 2210, "os": 2211, "both": 2212, "lit": 2213, "vern": 2214, "ai": 2215, "albu": 2216, "pa": 2217, "enjoy": 2218, "beg": 2219, "elling": 2220, "thursday": 2221, "info": 2222, "san": 2223, "america": 2224, "hair": 2225, "tel": 2226, "march": 2227, "concer": 2228, "college": 2229, "conference": 2230, "app": 2231, "hour": 2232, "chang": 2233, "âļ": 2234, "sour": 2235, "ols": 2236, "weather": 2237, "war": 2238, "phi": 2239, "festival": 2240, "second": 2241, "cute": 2242, "prac": 2243, "ener": 2244, "stry": 2245, "lea": 2246, "polit": 2247, "sav": 2248, "sen": 2249, "ow": 2250, "mi": 2251, "near": 2252, "ought": 2253, "ze": 2254, "coffe": 2255, "willi": 2256, "dan": 2257, "sey": 2258, "david": 2259, "ese": 2260, "fan": 2261, "deci": 2262, "theat": 2263, "nov": 2264, "ation": 2265, "trac": 2266, "sci": 2267, "review": 2268, "cel": 2269, "em": 2270, "un": 2271, "july": 2272, "orig": 2273, 
"tion": 2274, "dru": 2275, "former": 2276, "stay": 2277, "after": 2278, "inv": 2279, "took": 2280, "data": 2281, "bal": 2282, "tues": 2283, "dan": 2284, "evening": 2285, "ðŁĺĤðŁĺĤ": 2286, "dol": 2287, "ures": 2288, "provi": 2289, "ts": 2290, "est": 2291, "sign": 2292, "jac": 2293, "uk": 2294, "song": 2295, "yet": 2296, "bow": 2297, "indu": 2298, "jap": 2299, "hoo": 2300, "point": 2301, "anyone": 2302, "zy": 2303, "ist": 2304, "hur": 2305, "ital": 2306, "building": 2307, "woman": 2308, "chur": 2309, "jer": 2310, "perfor": 2311, "coach": 2312, "league": 2313, "cess": 2314, "net": 2315, "imag": 2316, "nation": 2317, "brit": 2318, "que": 2319, "awards": 2320, "ages": 2321, "works": 2322, "ced": 2323, "mance": 2324, "late": 2325, "ign": 2326, "money": 2327, "true": 2328, "ii": 2329, "tell": 2330, "plac": 2331, "pac": 2332, "asy": 2333, "world": 2334, "behin": 2335, "import": 2336, "reading": 2337, "gram": 2338, "giving": 2339, "met": 2340, "hit": 2341, "forward": 2342, "stom": 2343, "present": 2344, "june": 2345, "social": 2346, "noon": 2347, "mart": 2348, "half": 2349, "swe": 2350, "govern": 2351, "ker": 2352, "details": 2353, "lish": 2354, "__": 2355, "acy": 2356, "sia": 2357, "bert": 2358, "fall": 2359, "!!!!": 2360, "),": 2361, "thi": 2362, "diti": 2363, "sport": 2364, "king": 2365, "fit": 2366, "staf": 2367, "cat": 2368, "muse": 2369, "centr": 2370, "yer": 2371, "contro": 2372, "bloo": 2373, "walk": 2374, "actu": 2375, "didn": 2376, "lim": 2377, "learning": 2378, "research": 2379, "wedne": 2380, "auth": 2381, "hours": 2382, "ky": 2383, "far": 2384, "hen": 2385, "....": 2386, "itch": 2387, "ril": 2388, "strong": 2389, "sky": 2390, "questi": 2391, "james": 2392, "ron": 2393, "dg": 2394, "fur": 2395, "cin": 2396, "does": 2397, "appro": 2398, "marke": 2399, "tures": 2400, "fully": 2401, "chat": 2402, "behind": 2403, "tem": 2404, "fini": 2405, "mission": 2406, "batt": 2407, "feel": 2408, "heav": 2409, "everything": 2410, "bar": 2411, "wish": 2412, "premi": 2413, "ima": 
2414, "experience": 2415, "each": 2416, "report": 2417, "sweet": 2418, "tics": 2419, "spring": 2420, "respon": 2421, "system": 2422, "victor": 2423, "lin": 2424, "saw": 2425, "already": 2426, "ghter": 2427, "fle": 2428, "ãĄ": 2429, "bring": 2430, "album": 2431, "--": 2432, "ells": 2433, "stan": 2434, "tom": 2435, "international": 2436, "went": 2437, "anni": 2438, "match": 2439, "pper": 2440, "stone": 2441, "small": 2442, "rain": 2443, "fashion": 2444, "area": 2445, "van": 2446, "agram": 2447, "ko": 2448, "thought": 2449, "worth": 2450, "van": 2451, "mer": 2452, "coffee": 2453, "ites": 2454, "gn": 2455, "artist": 2456, "con": 2457, "arch": 2458, "cir": 2459, "secre": 2460, "ground": 2461, "iso": 2462, "hand": 2463, "com": 2464, "bridge": 2465, "hs": 2466, "xi": 2467, "link": 2468, "pul": 2469, "spl": 2470, "race": 2471, "fli": 2472, "river": 2473, "gas": 2474, "disco": 2475, "dal": 2476, "player": 2477, "fit": 2478, "photos": 2479, "ity": 2480, "ok": 2481, "jor": 2482, "tra": 2483, "april": 2484, "ads": 2485, "adi": 2486, "solu": 2487, "beauty": 2488, "door": 2489, "mess": 2490, "update": 2491, "alia": 2492, "scho": 2493, "ened": 2494, "moment": 2495, "scot": 2496, "science": 2497, "ior": 2498, "ties": 2499, "across": 2500, "ously": 2501, "shes": 2502, "doesn": 2503, "page": 2504, "water": 2505, "million": 2506, "classi": 2507, "lic": 2508, "cast": 2509, "formation": 2510, "michael": 2511, "ello": 2512, "smo": 2513, "ints": 2514, "vision": 2515, "opening": 2516, "ldn": 2517, "austr": 2518, "tuesday": 2519, "winner": 2520, "possi": 2521, "round": 2522, "shirt": 2523, "dit": 2524, "bo": 2525, "ues": 2526, "illed": 2527, "along": 2528, "trip": 2529, "starting": 2530, "impro": 2531, "kan": 2532, "person": 2533, "not": 2534, "reco": 2535, "needs": 2536, "cle": 2537, "lie": 2538, "rest": 2539, "ring": 2540, "winter": 2541, "simp": 2542, "mom": 2543, "beer": 2544, "face": 2545, "tors": 2546, "usa": 2547, "collection": 2548, "geor": 2549, "session": 2550, "trying": 2551, 
"las": 2552, "lake": 2553, "jen": 2554, "origin": 2555, "student": 2556, "secur": 2557, "vin": 2558, "pics": 2559, "expe": 2560, "comp": 2561, "gonna": 2562, "equ": 2563, "bad": 2564, "ley": 2565, "au": 2566, "members": 2567, "break": 2568, "wall": 2569, "gic": 2570, "dinner": 2571, "bul": 2572, "inspir": 2573, "ri": 2574, "mind": 2575, "ica": 2576, "winning": 2577, "talking": 2578, "tren": 2579, "sis": 2580, "ten": 2581, "wonderful": 2582, "snow": 2583, "hear": 2584, "thom": 2585, "nothing": 2586, "gui": 2587, "stin": 2588, "blog": 2589, "fest": 2590, "bun": 2591, "lee": 2592, "wards": 2593, "chance": 2594, "dress": 2595, "ren": 2596, "paul": 2597, "pes": 2598, "techno": 2599, "russi": 2600, "card": 2601, "east": 2602, "mari": 2603, "wine": 2604, "ti": 2605, "law": 2606, "stric": 2607, "ki": 2608, "ape": 2609, "augu": 2610, "profe": 2611, "ash": 2612, "course": 2613, "mail": 2614, "rently": 2615, "dun": 2616, "mun": 2617, "love": 2618, "island": 2619, "drive": 2620, "sl": 2621, "ended": 2622, "main": 2623, "lost": 2624, "nature": 2625, "âĿ¤ï¸ı": 2626, "chic": 2627, "repor": 2628, "pin": 2629, "pro": 2630, "station": 2631, "cep": 2632, "takes": 2633, "company": 2634, "goes": 2635, "ond": 2636, "mach": 2637, "radio": 2638, "dad": 2639, "rock": 2640, "ja": 2641, "pay": 2642, "champion": 2643, "ee": 2644, "inde": 2645, "tta": 2646, "atic": 2647, "tab": 2648, "believe": 2649, "energy": 2650, "zi": 2651, "tat": 2652, "word": 2653, "once": 2654, "resul": 2655, "yl": 2656, "andre": 2657, "ano": 2658, "instagram": 2659, "close": 2660, "tam": 2661, "custom": 2662, "wa": 2663, "conom": 2664, "shows": 2665, "life": 2666, "kin": 2667, "rob": 2668, "tage": 2669, "nation": 2670, "almost": 2671, "listen": 2672, "save": 2673, "reli": 2674, "ace": 2675, "mary": 2676, "tree": 2677, "forget": 2678, "jack": 2679, "waiting": 2680, "director": 2681, "hill": 2682, "born": 2683, "temp": 2684, "fl": 2685, "ste": 2686, "ona": 2687, "single": 2688, "wednesday": 2689, "united": 2690, "ino": 
2691, "@_": 2692, "nel": 2693, "celebrate": 2694, "ending": 2695, "deal": 2696, "ji": 2697, "canada": 2698, "huge": 2699, "track": 2700, "âĢ¢": 2701, "fy": 2702, "fanta": 2703, "ang": 2704, "york": 2705, "release": 2706, "pun": 2707, "episo": 2708, "words": 2709, "tour": 2710, "pack": 2711, "igh": 2712, "classic": 2713, "performance": 2714, "ket": 2715, "afternoon": 2716, "record": 2717, "wins": 2718, "proble": 2719, "âĿ¤": 2720, "four": 2721, "bed": 2722, "bank": 2723, "dance": 2724, "sla": 2725, "called": 2726, "might": 2727, "ap": 2728, "past": 2729, "ðŁļ": 2730, "different": 2731, "ite": 2732, "gift": 2733, "ssive": 2734, "church": 2735, "cus": 2736, "program": 2737, "hotel": 2738, "ice": 2739, "mad": 2740, "security": 2741, "enge": 2742, "dc": 2743, "enough": 2744, "sta": 2745, "ety": 2746, "dead": 2747, "gun": 2748, "hear": 2749, "mir": 2750, "human": 2751, "gress": 2752, "ounds": 2753, "piece": 2754, "breaking": 2755, "garden": 2756, "fight": 2757, "views": 2758, "fish": 2759, "started": 2760, "running": 2761, "green": 2762, "seri": 2763, "sm": 2764, "ask": 2765, "dor": 2766, "death": 2767, "econom": 2768, "eri": 2769, "ird": 2770, "ser": 2771, "lunch": 2772, "âģ¦": 2773, "box": 2774, "natu": 2775, "base": 2776, "ban": 2777, "fal": 2778, "global": 2779, "wild": 2780, "wow": 2781, "outside": 2782, "move": 2783, "lead": 2784, "anal": 2785, "museum": 2786, "ong": 2787, "haw": 2788, "power": 2789, "thank": 2790, "bac": 2791, "charac": 2792, "campa": 2793, "digital": 2794, "ro": 2795, "oper": 2796, "dev": 2797, "wol": 2798, "pati": 2799, "fa": 2800, "male": 2801, "paper": 2802, "illing": 2803, "cs": 2804, "âĄ": 2805, "education": 2806, "taken": 2807, "effe": 2808, "mou": 2809, "sad": 2810, "\".": 2811, "based": 2812, "staff": 2813, "including": 2814, "living": 2815, "ac": 2816, "china": 2817, "mob": 2818, "storm": 2819, "luck": 2820, "phil": 2821, "oo": 2822, "yn": 2823, "travel": 2824, "kel": 2825, "tial": 2826, "price": 2827, "book": 2828, "important": 2829, 
"bio": 2830, "pool": 2831, "nyc": 2832, "fab": 2833, "load": 2834, "?!": 2835, "challenge": 2836, "cry": 2837, "serve": 2838, "wear": 2839, "bus": 2840, "tain": 2841, "number": 2842, "ror": 2843, "kat": 2844, "iz": 2845, "though": 2846, "hosp": 2847, "mm": 2848, "fair": 2849, "utes": 2850, "hot": 2851, "pop": 2852, "fied": 2853, "camp": 2854, "development": 2855, "libr": 2856, "cali": 2857, "ems": 2858, "âģ¦@": 2859, "bol": 2860, "ised": 2861, "standing": 2862, "model": 2863, "ita": 2864, "gle": 2865, "brown": 2866, "image": 2867, "vered": 2868, "force": 2869, "oil": 2870, "partic": 2871, "shu": 2872, "daily": 2873, "law": 2874, "sec": 2875, "class": 2876, "camp": 2877, "holiday": 2878, "clin": 2879, "kers": 2880, "present": 2881, "game": 2882, "incredi": 2883, "ership": 2884, "interview": 2885, "bill": 2886, "due": 2887, "andy": 2888, "abo": 2889, "innov": 2890, "key": 2891, "acade": 2892, "pil": 2893, "moder": 2894, "stars": 2895, "brand": 2896, "fer": 2897, "weeks": 2898, "consi": 2899, "pre": 2900, "safe": 2901, "writ": 2902, "dium": 2903, "launch": 2904, "marketing": 2905, "annual": 2906, "assi": 2907, "court": 2908, "lady": 2909, "cted": 2910, "anda": 2911, "inside": 2912, "child": 2913, "oppor": 2914, "smith": 2915, "centre": 2916, "gue": 2917, "âģ©": 2918, "fren": 2919, "sty": 2920, "fort": 2921, "ently": 2922, "isn": 2923, "keep": 2924, "tober": 2925, "ony": 2926, "boy": 2927, "ald": 2928, "colla": 2929, "demo": 2930, "level": 2931, "compet": 2932, "ado": 2933, "bour": 2934, "fantastic": 2935, "mate": 2936, "su": 2937, "south": 2938, "opportun": 2939, "versary": 2940, "later": 2941, "bud": 2942, "facebook": 2943, "laun": 2944, "stern": 2945, "pit": 2946, "!\"": 2947, "maj": 2948, "gram": 2949, "tbt": 2950, "fire": 2951, "happy": 2952, "aks": 2953, "whole": 2954, "actually": 2955, "iller": 2956, "ella": 2957, "lots": 2958, "alex": 2959, "ange": 2960, "lands": 2961, "ðŁĺŃ": 2962, "enter": 2963, "rou": 2964, "episode": 2965, "ped": 2966, "inten": 2967, 
"shire": 2968, "who": 2969, "plan": 2970, "ho": 2971, "cake": 2972, "west": 2973, "magaz": 2974, "fresh": 2975, "cc": 2976, "nar": 2977, "chris": 2978, "writing": 2979, "wer": 2980, "nom": 2981, "lo": 2982, "midd": 2983, "dream": 2984, "ol": 2985, "tional": 2986, "deb": 2987, ">>": 2988, "become": 2989, "si": 2990, "grand": 2991, "alling": 2992, "histor": 2993, "ride": 2994, "ired": 2995, "safe": 2996, "queen": 2997, "cil": 2998, "intro": 2999, "vil": 3000, "dani": 3001, "...": 3002, "artic": 3003, "stat": 3004, "short": 3005, "oring": 3006, "selfi": 3007, "missi": 3008, "doc": 3009, "bit": 3010, "gall": 3011, "bom": 3012, "ire": 3013, "selec": 3014, "dition": 3015, "ðŁĶ„": 3016, "friend": 3017, "beat": 3018, "ghting": 3019, "ðŁĺĬ": 3020, "peace": 3021, "exhi": 3022, "anta": 3023, "ability": 3024, "illu": 3025, "jon": 3026, "quality": 3027, "tribu": 3028, "mes": 3029, "players": 3030, "fair": 3031, "cut": 3032, "cab": 3033, "success": 3034, "bi": 3035, "sus": 3036, "promo": 3037, "sche": 3038, "ange": 3039, "ico": 3040, "commit": 3041, "catch": 3042, "illa": 3043, "kind": 3044, "feeling": 3045, "quo": 3046, "say": 3047, "anniversary": 3048, "spot": 3049, "mother": 3050, "ane": 3051, "pend": 3052, "yourself": 3053, "ops": 3054, "apple": 3055, "minutes": 3056, "po": 3057, "grand": 3058, "ries": 3059, "haha": 3060, "career": 3061, "edition": 3062, "dec": 3063, "rick": 3064, "ami": 3065, "concert": 3066, "itive": 3067, "geous": 3068, "dly": 3069, "tte": 3070, "advent": 3071, "ig": 3072, "lights": 3073, "aker": 3074, "sky": 3075, "âĄ£": 3076, "ray": 3077, "finished": 3078, "way": 3079, "sd": 3080, "accoun": 3081, "ðŁēķ": 3082, "cky": 3083, "chel": 3084, "liter": 3085, "painting": 3086, "los": 3087, "stun": 3088, "technology": 3089, "nas": 3090, "mar": 3091, "bil": 3092, "africa": 3093, "kie": 3094, "eyes": 3095, "golf": 3096, "plus": 3097, "nia": 3098, "itec": 3099, "services": 3100, "wedding": 3101, "known": 3102, "tele": 3103, ".....": 3104, "starts": 3105, "paren": 
3106, "wants": 3107, "ational": 3108, "months": 3109, "windo": 3110, "favour": 3111, "ert": 3112, "magazine": 3113, "exclu": 3114, "reve": 3115, "bc": 3116, "original": 3117, "ess": 3118, "nal": 3119, "anti": 3120, "stro": 3121, "tice": 3122, "study": 3123, "à¤": 3124, "vac": 3125, "national": 3126, "five": 3127, "rain": 3128, "vement": 3129, "ute": 3130, "verse": 3131, "emer": 3132, "army": 3133, "possible": 3134, "guess": 3135, "valley": 3136, "thern": 3137, "crow": 3138, "mr": 3139, "color": 3140, "onto": 3141, "pick": 3142, "clear": 3143, "dark": 3144, "tac": 3145, "wanted": 3146, "itting": 3147, "cancer": 3148, "government": 3149, "die": 3150, "rise": 3151, "zing": 3152, "cold": 3153, "foun": 3154, "studio": 3155, "stration": 3156, "brother": 3157, "ahead": 3158, "shel": 3159, "micro": 3160, "ically": 3161, "dau": 3162, "signed": 3163, "viol": 3164, "ax": 3165, "asse": 3166, "io": 3167, "wre": 3168, "splay": 3169, "chick": 3170, "august": 3171, "plat": 3172, "tips": 3173, "spi": 3174, "human": 3175, "easy": 3176, "logi": 3177, "mike": 3178, "grow": 3179, "agre": 3180, "ww": 3181, "shad": 3182, "motiv": 3183, "wide": 3184, "turns": 3185, "omg": 3186, "var": 3187, "defin": 3188, "sug": 3189, "jim": 3190, "ðŁĶ„": 3191, "td": 3192, "campaign": 3193, "named": 3194, "retweet": 3195, "cop": 3196, "tv": 3197, "leav": 3198, "kis": 3199, "double": 3200, "smar": 3201, "issue": 3202, "villa": 3203, "information": 3204, "lies": 3205, "stock": 3206, "nt": 3207, "distric": 3208, "shor": 3209, "mix": 3210, "ero": 3211, "sep": 3212, "mex": 3213, "seeing": 3214, "live": 3215, "remin": 3216, "code": 3217, "gur": 3218, "sc": 3219, "wild": 3220, "lun": 3221, "hood": 3222, "spot": 3223, "father": 3224, "forever": 3225, "upd": 3226, "traf": 3227, "fly": 3228, "need": 3229, "gradu": 3230, "train": 3231, "make": 3232, "sab": 3233, "bey": 3234, "size": 3235, "leader": 3236, "talks": 3237, "eu": 3238, "log": 3239, "fox": 3240, "gorgeous": 3241, "less": 3242, "lets": 3243, "surpri": 
3244, "myself": 3245, "note": 3246, "lives": 3247, "fru": 3248, "loved": 3249, "sever": 3250, "dem": 3251, "ji": 3252, "soc": 3253, "hold": 3254, "dogs": 3255, "ni": 3256, "âŀ": 3257, "leave": 3258, "airport": 3259, "benef": 3260, "expl": 3261, "ships": 3262, "complete": 3263, "achi": 3264, "great": 3265, "vintage": 3266, "jack": 3267, "roc": 3268, "wood": 3269, "priv": 3270, "offer": 3271, "eye": 3272, "version": 3273, "tea": 3274, "coach": 3275, "offic": 3276, "well": 3277, "gen": 3278, "sat": 3279, "hh": 3280, "youth": 3281, "ox": 3282, "?\"": 3283, "mt": 3284, "mix": 3285, "gg": 3286, "dle": 3287, "natural": 3288, "build": 3289, "breakfast": 3290, "thinking": 3291, "theatre": 3292, "moon": 3293, "berg": 3294, "goals": 3295, "george": 3296, "ene": 3297, "excell": 3298, "iling": 3299, "tune": 3300, "yed": 3301, "gate": 3302, "mit": 3303, "network": 3304, "joe": 3305, "hello": 3306, "fb": 3307, "tube": 3308, "wearing": 3309, "athle": 3310, "struc": 3311, "hard": 3312, "glass": 3313, "gers": 3314, "throw": 3315, "ges": 3316, "bt": 3317, "industry": 3318, "management": 3319, "alist": 3320, "goal": 3321, "stream": 3322, "yel": 3323, "avi": 3324, "icious": 3325, "others": 3326, "ski": 3327, "christi": 3328, "bird": 3329, "esc": 3330, "min": 3331, "tro": 3332, "lt": 3333, "jan": 3334, "imp": 3335, "rights": 3336, "sha": 3337, "organ": 3338, "central": 3339, "ara": 3340, "roll": 3341, "favourite": 3342, "chester": 3343, "else": 3344, "pay": 3345, "cars": 3346, "mine": 3347, "step": 3348, "practice": 3349, "major": 3350, "hang": 3351, "ðŁĺĺ": 3352, "non": 3353, "vari": 3354, "engine": 3355, "volun": 3356, "dia": 3357, "iled": 3358, "architec": 3359, "pink": 3360, "ds": 3361, "thy": 3362, "wash": 3363, "website": 3364, "bag": 3365, "control": 3366, "elli": 3367, "fra": 3368, "answ": 3369, "dence": 3370, "yu": 3371, "ron": 3372, "ola": 3373, "gin": 3374, "drin": 3375, "lic": 3376, "couple": 3377, "spar": 3378, "gon": 3379, "create": 3380, "ct": 3381, "celebrating": 3382, 
"deep": 3383, "eat": 3384, "tee": 3385, "voice": 3386, "drop": 3387, "visit": 3388, "ators": 3389, "stadium": 3390, "ft": 3391, "wis": 3392, "rol": 3393, "grade": 3394, "famil": 3395, "points": 3396, "repre": 3397, "was": 3398, "traffic": 3399, "japan": 3400, "org": 3401, "honor": 3402, "texas": 3403, "manu": 3404, "âĻ„": 3405, "safety": 3406, "rer": 3407, "bag": 3408, "emplo": 3409, "released": 3410, "regu": 3411, "aka": 3412, "nav": 3413, "role": 3414, "senior": 3415, "spect": 3416, "cross": 3417, "lines": 3418, "best": 3419, "pack": 3420, "sin": 3421, "tie": 3422, "missing": 3423, "sunset": 3424, "liber": 3425, "ising": 3426, "jay": 3427, "ski": 3428, "championship": 3429, "activ": 3430, "ladies": 3431, "played": 3432, "yy": 3433, "publ": 3434, "alo": 3435, "pride": 3436, "sr": 3437, "paki": 3438, "lux": 3439, "survi": 3440, "cked": 3441, "ets": 3442, "chocol": 3443, "australia": 3444, "paris": 3445, "miles": 3446, "hat": 3447, "mental": 3448, "ala": 3449, "mean": 3450, "mobile": 3451, "ena": 3452, "insi": 3453, "found": 3454, "chief": 3455, "tag": 3456, "incredible": 3457, "return": 3458, "é": 3459, "google": 3460, "french": 3461, "crew": 3462, "hallo": 3463, "alian": 3464, "jaz": 3465, "cher": 3466, "silver": 3467, "north": 3468, "english": 3469, "baseball": 3470, "caf": 3471, "limited": 3472, "following": 3473, "appreci": 3474, "earth": 3475, "kir": 3476, "vember": 3477, "wed": 3478, "ption": 3479, "ged": 3480, "october": 3481, "flori": 3482, "cr": 3483, "ency": 3484, "gave": 3485, "lord": 3486, "stuff": 3487, "berry": 3488, "post": 3489, "smile": 3490, "broad": 3491, "state": 3492, "gger": 3493, "means": 3494, "icy": 3495, "gun": 3496, "yo": 3497, "master": 3498, "burg": 3499, "hands": 3500, "nie": 3501, "//": 3502, "union": 3503, "british": 3504, "biggest": 3505, "district": 3506, "aming": 3507, "hil": 3508, "oce": 3509, "person": 3510, "pass": 3511, "envir": 3512, "schools": 3513, "arrived": 3514, "ances": 3515, "inspired": 3516, "expla": 3517, "ben": 
3518, "library": 3519, "bott": 3520, "amp": 3521, "steph": 3522, "contact": 3523, "bang": 3524, "ms": 3525, "califor": 3526, "told": 3527, "battle": 3528, "bb": 3529, "chicago": 3530, "⾨": 3531, "strate": 3532, "shi": 3533, "dece": 3534, "-)": 3535, "add": 3536, "lab": 3537, "jones": 3538, "legend": 3539, "castle": 3540, "inger": 3541, "stance": 3542, "bel": 3543, "ura": 3544, "refu": 3545, "leaders": 3546, "pot": 3547, "sex": 3548, "hic": 3549, "article": 3550, "kid": 3551, "france": 3552, "xx": 3553, "exe": 3554, "guide": 3555, "volunte": 3556, "print": 3557, "ali": 3558, "ceo": 3559, "tweets": 3560, "wx": 3561, "scene": 3562, "volu": 3563, "anti": 3564, "han": 3565, "associ": 3566, "sharing": 3567, "rose": 3568, "minister": 3569, "sher": 3570, "inste": 3571, "clean": 3572, "democr": 3573, "poster": 3574, "skin": 3575, "psy": 3576, "proper": 3577, "crazy": 3578, "iam": 3579, "ore": 3580, "ini": 3581, "anything": 3582, "pod": 3583, "moving": 3584, "click": 3585, "explo": 3586, "comb": 3587, "craft": 3588, "fi": 3589, "blood": 3590, "isra": 3591, "public": 3592, "dent": 3593, "olym": 3594, "england": 3595, "asi": 3596, "cher": 3597, "fact": 3598, "environ": 3599, "harry": 3600, "gone": 3601, "medic": 3602, "enjoying": 3603, "justice": 3604, "jr": 3605, "indian": 3606, "wife": 3607, "sound": 3608, "tes": 3609, "drawing": 3610, "pal": 3611, "idea": 3612, "crit": 3613, "juli": 3614, "iler": 3615, "warm": 3616, "clar": 3617, "thoughts": 3618, "defen": 3619, "council": 3620, "introduc": 3621, "died": 3622, "janu": 3623, "ani": 3624, "send": 3625, "lier": 3626, "ml": 3627, "interesting": 3628, "trade": 3629, "wind": 3630, "bay": 3631, "sac": 3632, "ancy": 3633, "source": 3634, "bes": 3635, "organi": 3636, "arly": 3637, "large": 3638, "ffici": 3639, "tag": 3640, "ut": 3641, "desp": 3642, "oes": 3643, "title": 3644, "sym": 3645, "pictures": 3646, "open": 3647, "women": 3648, "showing": 3649, "ria": 3650, "least": 3651, "leadership": 3652, "current": 3653, "electr": 3654, 
"valent": 3655, "listening": 3656, "ckey": 3657, "general": 3658, "deser": 3659, "duce": 3660, ";)": 3661, "cent": 3662, "ðŁĺįðŁĺį": 3663, "scott": 3664, "poor": 3665, "selfie": 3666, "events": 3667, "ion": 3668, "wrong": 3669, "dev": 3670, "hill": 3671, "septe": 3672, "culture": 3673, "line": 3674, "sorry": 3675, "sent": 3676, "sister": 3677, "cept": 3678, "kri": 3679, "november": 3680, "ari": 3681, "announce": 3682, "zation": 3683, "bran": 3684, "gent": 3685, "du": 3686, "len": 3687, "pers": 3688, "fm": 3689, "martin": 3690, "op": 3691, "emb": 3692, "ome": 3693, "middle": 3694, "success": 3695, "peter": 3696, "january": 3697, "flu": 3698, "racing": 3699, "dav": 3700, "bike": 3701, "ðŁı»": 3702, "pet": 3703, "shoot": 3704, "professi": 3705, "featuring": 3706, "september": 3707, "nowplaying": 3708, "staur": 3709, "za": 3710, "onic": 3711, "quick": 3712, "baske": 3713, "speaking": 3714, "milit": 3715, "zer": 3716, "chicken": 3717, "bell": 3718, "sad": 3719, "coast": 3720, "loving": 3721, "yers": 3722, "dj": 3723, "panel": 3724, "verage": 3725, "swit": 3726, "icks": 3727, "bou": 3728, "california": 3729, "sam": 3730, "parents": 3731, "ero": 3732, "killed": 3733, "phys": 3734, "jobs": 3735, "migr": 3736, "anth": 3737, "emo": 3738, "halloween": 3739, "ander": 3740, "cm": 3741, "competition": 3742, "eag": 3743, "sket": 3744, "spir": 3745, "maybe": 3746, "exclusive": 3747, "appe": 3748, "journey": 3749, "screen": 3750, "ford": 3751, "io": 3752, "hate": 3753, "ug": 3754, "soul": 3755, "hero": 3756, "society": 3757, "syn": 3758, "guit": 3759, "nh": 3760, "dj": 3761, "ases": 3762, "impre": 3763, "time": 3764, "sales": 3765, "dd": 3766, "fts": 3767, "summit": 3768, "stunning": 3769, "oms": 3770, "turned": 3771, "clean": 3772, "soft": 3773, "beat": 3774, "restaur": 3775, "dered": 3776, "ences": 3777, "magic": 3778, "dio": 3779, "shine": 3780, "guest": 3781, "healthy": 3782, "exhib": 3783, "stories": 3784, "popu": 3785, "nis": 3786, "ela": 3787, "below": 3788, "funny": 3789, 
"results": 3790, "sne": 3791, "currently": 3792, "ard": 3793, "download": 3794, "flight": 3795, "mal": 3796, "fine": 3797, "pad": 3798, "chu": 3799, "ented": 3800, "hat": 3801, "ðŁijı": 3802, "steve": 3803, "jo": 3804, "mark": 3805, "rat": 3806, "ball": 3807, "pc": 3808, "pon": 3809, "bby": 3810, "oli": 3811, "arts": 3812, "asure": 3813, "bowl": 3814, "attack": 3815, "mic": 3816, "dear": 3817, "range": 3818, "enter": 3819, "chocolate": 3820, "brilli": 3821, "access": 3822, ",\"": 3823, "???": 3824, "chap": 3825, "const": 3826, "tn": 3827, "matter": 3828, "blue": 3829, "gallery": 3830, "emp": 3831, "workshop": 3832, "leading": 3833, "yours": 3834, "basketball": 3835, "wanna": 3836, "thu": 3837, "__": 3838, "marri": 3839, "sleep": 3840, "bia": 3841, "che": 3842, "mad": 3843, "impact": 3844, "own": 3845, "sir": 3846, "channel": 3847, "europe": 3848, "esp": 3849, "kitch": 3850, "hospital": 3851, "wra": 3852, "royal": 3853, "fs": 3854, "neu": 3855, "quar": 3856, "ney": 3857, "acks": 3858, "chase": 3859, "ppy": 3860, "stal": 3861, "ately": 3862, "tim": 3863, "december": 3864, "rare": 3865, "perform": 3866, "cream": 3867, "weight": 3868, "choo": 3869, "night": 3870, "haven": 3871, "franc": 3872, "khan": 3873, "built": 3874, "helping": 3875, "trust": 3876, "type": 3877, "golden": 3878, "tax": 3879, "snow": 3880, "swi": 3881, "disa": 3882, "questions": 3883, "vey": 3884, "light": 3885, "cn": 3886, "cloud": 3887, "thomas": 3888, "aged": 3889, "shou": 3890, "teams": 3891, "gran": 3892, "reason": 3893, "aa": 3894, "youtube": 3895, "vp": 3896, "pizz": 3897, "manager": 3898, "bury": 3899, "credit": 3900, "treat": 3901, "max": 3902, "ik": 3903, "main": 3904, "ging": 3905, "dead": 3906, "probab": 3907, "yeah": 3908, "ãĤ": 3909, "brand": 3910, "soli": 3911, "plant": 3912, "tayl": 3913, "girl": 3914, "ðŁĺŃ": 3915, "nament": 3916, "auto": 3917, "message": 3918, "kore": 3919, "nur": 3920, "terr": 3921, "agu": 3922, "map": 3923, "senting": 3924, "loves": 3925, "gives": 3926, "gab": 
3927, "zen": 3928, "robert": 3929, "confir": 3930, "wars": 3931, "om": 3932, "stain": 3933, "camera": 3934, "ander": 3935, "wonder": 3936, "ab": 3937, "cap": 3938, "sold": 3939, "suit": 3940, "walking": 3941, "continue": 3942, "effec": 3943, "daughter": 3944, "danc": 3945, "chain": 3946, "multi": 3947, "kid": 3948, "yan": 3949, "champion": 3950, "vo": 3951, "tains": 3952, "host": 3953, "mini": 3954, "missed": 3955, "resc": 3956, "lyn": 3957, "finish": 3958, "delicious": 3959, "sas": 3960, "taylor": 3961, "ib": 3962, "promis": 3963, "products": 3964, "mountain": 3965, "florida": 3966, "register": 3967, "treat": 3968, "recent": 3969, "female": 3970, "booth": 3971, "matt": 3972, "vehic": 3973, "sop": 3974, "motor": 3975, "supporting": 3976, "phic": 3977, "extre": 3978, "drink": 3979, "lane": 3980, "third": 3981, "ps": 3982, "constru": 3983, "cere": 3984, "farm": 3985, "ðŁİī": 3986, "tured": 3987, "ðŁijī": 3988, "cats": 3989, "aj": 3990, "gie": 3991, "shooting": 3992, "asked": 3993, "pakistan": 3994, "ame": 3995, "mb": 3996, "gil": 3997, "legal": 3998, "square": 3999, "invol": 4000, "draw": 4001, "oooo": 4002, "!!!!": 4003, "opportunity": 4004, "py": 4005, "ei": 4006, "bts": 4007, "teacher": 4008, "character": 4009, "johnson": 4010, "bron": 4011, "lywood": 4012, "chine": 4013, "cing": 4014, "cine": 4015, "dge": 4016, "gaming": 4017, "russia": 4018, "cia": 4019, "quote": 4020, "rich": 4021, "gov": 4022, "flowers": 4023, "spiri": 4024, "stin": 4025, "growth": 4026, "ðŁı¼": 4027, "commer": 4028, "juni": 4029, "mum": 4030, "ran": 4031, "sna": 4032, "aren": 4033, "cb": 4034, "actor": 4035, "color": 4036, "sit": 4037, "pair": 4038, "chi": 4039, "bow": 4040, "academy": 4041, "held": 4042, "rang": 4043, "metal": 4044, "yl": 4045, "active": 4046, "probably": 4047, "tch": 4048, "needed": 4049, "spee": 4050, "choice": 4051, "italy": 4052, "ryan": 4053, "ðŁĩº": 4054, "flower": 4055, "vit": 4056, "mn": 4057, "foundation": 4058, "bak": 4059, "sions": 4060, "neigh": 4061, "floo": 
4062, "heard": 4063, "remo": 4064, "fresh": 4065, "inging": 4066, "ref": 4067, "town": 4068, "clou": 4069, "jesus": 4070, "spirit": 4071, "couldn": 4072, "zes": 4073, "ðŁēĻ": 4074, "williams": 4075, "proce": 4076, "modern": 4077, "process": 4078, "shoes": 4079, "created": 4080, "tric": 4081, "issues": 4082, "anne": 4083, "atten": 4084, "debut": 4085, "hr": 4086, "nit": 4087, "stig": 4088, "apo": 4089, "eps": 4090, "zu": 4091, "ãĢ": 4092, "six": 4093, "cards": 4094, "langu": 4095, "famous": 4096, "tournament": 4097, "sel": 4098, "ebay": 4099, "yn": 4100, "ston": 4101, "kick": 4102, "announced": 4103, "kam": 4104, "voc": 4105, "brilliant": 4106, "house": 4107, "cheese": 4108, "warri": 4109, "music": 4110, "hockey": 4111, "ðŁĺĤðŁĺĤ": 4112, "skills": 4113, "autom": 4114, "smart": 4115, "medical": 4116, "mony": 4117, "ex": 4118, "guar": 4119, "give": 4120, "personal": 4121, "vention": 4122, "alli": 4123, "press": 4124, "floor": 4125, "mc": 4126, "victory": 4127, "him": 4128, "simple": 4129, "thor": 4130, "ðŁĩºðŁĩ": 4131, "tail": 4132, "lucky": 4133, "alex": 4134, "quite": 4135, "bot": 4136, "ssions": 4137, "challeng": 4138, "cann": 4139, "amazon": 4140, "hell": 4141, "bought": 4142, "):": 4143, "edy": 4144, "secret": 4145, "production": 4146, "independ": 4147, "defe": 4148, "added": 4149, "pr": 4150, "pag": 4151, "bed": 4152, "greatest": 4153, "within": 4154, "jay": 4155, "ðŁ„": 4156, "ireland": 4157, "rely": 4158, "sd": 4159, "text": 4160, "driving": 4161, "program": 4162, "speed": 4163, "colum": 4164, "stron": 4165, "é": 4166, "forest": 4167, "âĸ": 4168, "machine": 4169, "coin": 4170, "scar": 4171, "ount": 4172, "bie": 4173, "”ï¸ı": 4174, "portra": 4175, "common": 4176, "wrest": 4177, "received": 4178, "know": 4179, "invest": 4180, "plans": 4181, "accor": 4182, "adop": 4183, "tery": 4184, "reali": 4185, "pp": 4186, "kal": 4187, "artwork": 4188, "mean": 4189, "god": 4190, "instead": 4191, "anci": 4192, "motivation": 4193, "asing": 4194, "inspiration": 4195, "upcoming": 
4196, "political": 4197, "europe": 4198, "mers": 4199, "heavy": 4200, "ðŁijį": 4201, "febru": 4202, "scotland": 4203, "ough": 4204, "bt": 4205, "boss": 4206, "schedu": 4207, "speak": 4208, "nick": 4209, "ured": 4210, "ino": 4211, "ek": 4212, "risk": 4213, "tory": 4214, "presents": 4215, "bon": 4216, "rug": 4217, "states": 4218, "exhibition": 4219, "ilo": 4220, "mill": 4221, "brought": 4222, ":-)": 4223, "touri": 4224, "come": 4225, "officially": 4226, "champions": 4227, "doors": 4228, "rep": 4229, "pose": 4230, "extra": 4231, "kings": 4232, "soccer": 4233, "squad": 4234, "applic": 4235, "ata": 4236, "sometimes": 4237, "tari": 4238, "excellent": 4239, "ðŁĺĺ": 4240, "straight": 4241, "carol": 4242, "rip": 4243, "âĢį": 4244, "graphic": 4245, "mol": 4246, "election": 4247, "february": 4248, "asons": 4249, "li": 4250, "dir": 4251, "mt": 4252, "nick": 4253, "usu": 4254, "mrs": 4255, "comics": 4256, "institu": 4257, "corpor": 4258, "vi": 4259, "ðŁĻı": 4260, "tural": 4261, "dise": 4262, "acci": 4263, "weare": 4264, "among": 4265, "shopping": 4266, "till": 4267, "what": 4268, "chair": 4269, "span": 4270, "chinese": 4271, "innovation": 4272, "joy": 4273, "kit": 4274, "century": 4275, "obama": 4276, "phili": 4277, "fc": 4278, "reach": 4279, "citi": 4280, "ulous": 4281, "non": 4282, "dang": 4283, "happening": 4284, "burn": 4285, "pel": 4286, "orange": 4287, "dv": 4288, "kick": 4289, "claim": 4290, "ingham": 4291, "phy": 4292, "nov": 4293, "podcast": 4294, "whi": 4295, "nights": 4296, "earlier": 4297, "bear": 4298, "lah": 4299, "exciting": 4300, "ora": 4301, "given": 4302, "slo": 4303, "memories": 4304, "continues": 4305, "product": 4306, "gho": 4307, "cd": 4308, "knows": 4309, "ðŁİī": 4310, "published": 4311, "discuss": 4312, "yard": 4313, "iphone": 4314, "tries": 4315, "wall": 4316, "feb": 4317, "aren": 4318, "truth": 4319, "winners": 4320, "ture": 4321, "ditional": 4322, "military": 4323, "problem": 4324, "mand": 4325, "dog": 4326, "loss": 4327, "cric": 4328, "canadi": 4329, 
"veter": 4330, "village": 4331, "\",": 4332, "yr": 4333, "ung": 4334, "donald": 4335, "aging": 4336, "birds": 4337, "scienti": 4338, "les": 4339, "this": 4340, "region": 4341, "tical": 4342, "itten": 4343, "ila": 4344, "ðŁĺİ": 4345, "dad": 4346, "diam": 4347, "above": 4348, "stren": 4349, "lit": 4350, "pir": 4351, "lab": 4352, "focus": 4353, "busy": 4354, "dur": 4355, "apply": 4356, "sma": 4357, "author": 4358, "aci": 4359, "execu": 4360, "domin": 4361, "rela": 4362, "jackson": 4363, "ato": 4364, "washington": 4365, "ðŁĻĮ": 4366, "kill": 4367, "popular": 4368, "cement": 4369, "road": 4370, "eating": 4371, "location": 4372, "vent": 4373, "arre": 4374, "nan": 4375, "custo": 4376, "adventure": 4377, "ordin": 4378, "sport": 4379, "ult": 4380, "lock": 4381, "question": 4382, "driver": 4383, "landsc": 4384, "oni": 4385, "kins": 4386, "pd": 4387, "jordan": 4388, "tered": 4389, "kk": 4390, "af": 4391, "child": 4392, "sp": 4393, "justin": 4394, "eni": 4395, "selling": 4396, "zo": 4397, "whit": 4398, "boston": 4399, "particip": 4400, "signing": 4401, "happened": 4402, "heat": 4403, "mam": 4404, "dreams": 4405, "lows": 4406, "graph": 4407, "theday": 4408, "heading": 4409, "bro": 4410, "blessed": 4411, "vic": 4412, "vegas": 4413, "hd": 4414, "inning": 4415, "roman": 4416, "andro": 4417, "denti": 4418, "use": 4419, "cit": 4420, "progress": 4421, "writer": 4422, "bob": 4423, "ffs": 4424, "growing": 4425, "bly": 4426, "aware": 4427, "exam": 4428, "spent": 4429, "bet": 4430, "score": 4431, "beyond": 4432, "docu": 4433, "adel": 4434, "sf": 4435, "coura": 4436, "collabor": 4437, "inc": 4438, "private": 4439, "boat": 4440, "**": 4441, "zone": 4442, "pha": 4443, "bill": 4444, "total": 4445, "planning": 4446, "towards": 4447, "places": 4448, "preview": 4449, "creative": 4450, "damn": 4451, "ideas": 4452, "seems": 4453, "poten": 4454, "saying": 4455, "display": 4456, "sw": 4457, "aqu": 4458, "louis": 4459, "bye": 4460, "lil": 4461, "email": 4462, "western": 4463, "germany": 4464, 
"eller": 4465, "res": 4466, "fant": 4467, "mentary": 4468, "deals": 4469, "richard": 4470, "jersey": 4471, "streng": 4472, "rad": 4473, "pizza": 4474, "mond": 4475, "ware": 4476, "lac": 4477, "gi": 4478, "archi": 4479, "cd": 4480, "yellow": 4481, "recently": 4482, "reach": 4483, "à¹": 4484, "kitchen": 4485, "designed": 4486, "try": 4487, "gal": 4488, "restaurant": 4489, "ature": 4490, "ww": 4491, "jas": 4492, "lma": 4493, "ðŁijĮ": 4494, "pain": 4495, "avo": 4496, "minute": 4497, "schol": 4498, "therap": 4499, "ticket": 4500, "dry": 4501, "japan": 4502, "ditions": 4503, "terri": 4504, "selves": 4505, "happen": 4506, "tup": 4507, "mag": 4508, "copy": 4509, "sher": 4510, "freedom": 4511, "file": 4512, "specially": 4513, "toronto": 4514, "load": 4515, "gary": 4516, "rey": 4517, "answer": 4518, "loy": 4519, "caught": 4520, "prize": 4521, "une": 4522, "fication": 4523, "niger": 4524, "syd": 4525, "touch": 4526, "feature": 4527, "jazz": 4528, "records": 4529, "himself": 4530, "dish": 4531, "rober": 4532, "spotted": 4533, "master": 4534, "wave": 4535, "finals": 4536, "bull": 4537, "forum": 4538, "ald": 4539, "recomm": 4540, "cha": 4541, "ae": 4542, "doo": 4543, "instru": 4544, "truly": 4545, "lg": 4546, "ink": 4547, "brothers": 4548, "dest": 4549, "jim": 4550, "mit": 4551, "closed": 4552, "ison": 4553, "tried": 4554, "santa": 4555, "affe": 4556, "wan": 4557, "horse": 4558, "grow": 4559, "campus": 4560, "relation": 4561, "native": 4562, "journ": 4563, "gov": 4564, "oct": 4565, "kit": 4566, "bound": 4567, "partner": 4568, "rema": 4569, "crowd": 4570, "!)": 4571, "calls": 4572, "rail": 4573, "quali": 4574, "solution": 4575, "contest": 4576, "convers": 4577, "snap": 4578, "base": 4579, "initi": 4580, "tax": 4581, "ye": 4582, "entrepre": 4583, "itor": 4584, "construction": 4585, "food": 4586, "presented": 4587, "nings": 4588, "climate": 4589, "km": 4590, "model": 4591, "bj": 4592, "block": 4593, "presentation": 4594, "dream": 4595, "fix": 4596, "calling": 4597, "busine": 4598, 
"congress": 4599, "understand": 4600, "web": 4601, "value": 4602, "ï¸ıâĄ£": 4603, "mexico": 4604, "itely": 4605, "kim": 4606, "charity": 4607, "reflec": 4608, "blan": 4609, "flying": 4610, "analy": 4611, "families": 4612, "band": 4613, "recipe": 4614, "celebration": 4615, "accep": 4616, "ary": 4617, "tot": 4618, "gb": 4619, "interested": 4620, "captain": 4621, "âĻ„": 4622, "tip": 4623, "absol": 4624, "braz": 4625, "investig": 4626, "ology": 4627, "dec": 4628, "truck": 4629, "vering": 4630, "clear": 4631, "dont": 4632, "gotta": 4633, "advis": 4634, "begins": 4635, "mass": 4636, "descri": 4637, "block": 4638, "kim": 4639, "david": 4640, "songs": 4641, "memorial": 4642, "features": 4643, "sustain": 4644, "'.": 4645, "grab": 4646, "jose": 4647, "va": 4648, "conserv": 4649, "sets": 4650, "manchester": 4651, "fighting": 4652, "degre": 4653, "aga": 4654, "ind": 4655, "sleep": 4656, "position": 4657, "hair": 4658, "signs": 4659, "policy": 4660, "ito": 4661, "alert": 4662, "stam": 4663, "spend": 4664, "wy": 4665, "absolut": 4666, "dm": 4667, "animal": 4668, "myster": 4669, "successful": 4670, "problems": 4671, "robo": 4672, "kay": 4673, "garden": 4674, "pd": 4675, "mayor": 4676, "dale": 4677, "tol": 4678, "offers": 4679, "visiting": 4680, "friendly": 4681, "trees": 4682, "officer": 4683, "account": 4684, "kevin": 4685, "ðŁijį": 4686, "giant": 4687, "continu": 4688, "consu": 4689, "tract": 4690, "nfl": 4691, "ðŁĺĬ": 4692, "hq": 4693, "bility": 4694, "aar": 4695, "disney": 4696, "teen": 4697, "oned": 4698, "white": 4699, "trailer": 4700, "dedic": 4701, "alone": 4702, "absolutely": 4703, "digital": 4704, "william": 4705, "ination": 4706, "swa": 4707, "ee": 4708, "entire": 4709, "german": 4710, "roll": 4711, "hits": 4712, "cost": 4713, "stay": 4714, "tha": 4715, "alive": 4716, "according": 4717, "cot": 4718, "literally": 4719, "herit": 4720, "reti": 4721, "hahaha": 4722, "experi": 4723, "likes": 4724, "gt": 4725, "steel": 4726, "____": 4727, "chair": 4728, "christian": 4729, 
"tower": 4730, "difference": 4731, "md": 4732, "tress": 4733, "mid": 4734, "prince": 4735, "african": 4736, "feder": 4737, "foot": 4738, "carri": 4739, "served": 4740, "rice": 4741, "shall": 4742, "featured": 4743, "cker": 4744, "recru": 4745, "poe": 4746, "sense": 4747, "nific": 4748, "comedy": 4749, "content": 4750, "fat": 4751, "posted": 4752, "contribu": 4753, "timate": 4754, "liver": 4755, "mble": 4756, "internet": 4757, "age": 4758, "european": 4759, "cling": 4760, "glad": 4761, "ffic": 4762, "sco": 4763, "akes": 4764, "elle": 4765, "termin": 4766, "tony": 4767, "pale": 4768, "colour": 4769, "serious": 4770, "patri": 4771, "movies": 4772, "bm": 4773, "professional": 4774, "ado": 4775, "alu": 4776, "bringing": 4777, "falls": 4778, "israel": 4779, "term": 4780, "language": 4781, "brook": 4782, "mann": 4783, "communic": 4784, "cannot": 4785, "acti": 4786, "phe": 4787, "yan": 4788, "entreprene": 4789, "turkey": 4790, "logical": 4791, "long": 4792, "arm": 4793, "urs": 4794, "workers": 4795, "ingly": 4796, "ggs": 4797, "ric": 4798, "tual": 4799, "receive": 4800, "opens": 4801, "gear": 4802, "social": 4803, "feet": 4804, "cking": 4805, "adver": 4806, "finan": 4807, "feels": 4808, "spla": 4809, "hr": 4810, "easter": 4811, "brain": 4812, "ãģ": 4813, "fig": 4814, "ledge": 4815, "nearly": 4816, "protect": 4817, "massive": 4818, "eth": 4819, "awa": 4820, "ðŁĺģ": 4821, "yrs": 4822, "awareness": 4823, "definitely": 4824, "kn": 4825, "imagine": 4826, "ku": 4827, "systems": 4828, "ðŁijı": 4829, "fas": 4830, "lik": 4831, "provide": 4832, "amo": 4833, "discover": 4834, "influ": 4835, "maker": 4836, "gaz": 4837, "fitness": 4838, "street": 4839, "ers": 4840, "ted": 4841, "wc": 4842, "ysis": 4843, "positive": 4844, "helped": 4845, "quest": 4846, "andrew": 4847, "brad": 4848, "bin": 4849, "hanging": 4850, "ling": 4851, "bright": 4852, "section": 4853, "mass": 4854, "ðŁĻĮ": 4855, "followers": 4856, "hosting": 4857, "tempor": 4858, "flag": 4859, "ave": 4860, "letter": 4861, "kur": 
4862, "requi": 4863, "often": 4864, "cryp": 4865, "suff": 4866, "âļ½": 4867, "russian": 4868, "treatment": 4869, "alle": 4870, "hay": 4871, "lan": 4872, "keeping": 4873, "holy": 4874, "powerful": 4875, "predic": 4876, "fund": 4877, "especially": 4878, "window": 4879, "jewel": 4880, "ily": 4881, "ðŁēľ": 4882, "generation": 4883, "appa": 4884, "seriously": 4885, "od": 4886, "ðŁĺĤðŁĺĤðŁĺĤ": 4887, "certi": 4888, "irish": 4889, "ðŁijĮ": 4890, "miami": 4891, "beth": 4892, "vity": 4893, "secu": 4894, "chef": 4895, "crime": 4896, "graphy": 4897, "max": 4898, "artists": 4899, "revolu": 4900, "guard": 4901, "speech": 4902, "uc": 4903, "updates": 4904, "faces": 4905, "stant": 4906, "changed": 4907, "reports": 4908, "lower": 4909, "pear": 4910, "nc": 4911, "kil": 4912, "looked": 4913, "speaker": 4914, "sf": 4915, "respect": 4916, "okay": 4917, "ocean": 4918, "sitting": 4919, "architecture": 4920, "trail": 4921, "seat": 4922, "ira": 4923, "leg": 4924, "japanese": 4925, "dam": 4926, "ular": 4927, "swim": 4928, "politics": 4929, "financial": 4930, "old": 4931, "mouth": 4932, "attemp": 4933, "destin": 4934, "fishing": 4935, "attention": 4936, "mem": 4937, "changes": 4938, "decided": 4939, "religi": 4940, "gin": 4941, "cav": 4942, "zz": 4943, "adam": 4944, "mac": 4945, "write": 4946, "begin": 4947, "scul": 4948, "alter": 4949, "iss": 4950, "athon": 4951, "images": 4952, "moo": 4953, "joined": 4954, "ðŁĺī": 4955, "âŀ”ï¸ı": 4956, "passed": 4957, "musli": 4958, "hir": 4959, "largest": 4960, "camer": 4961, "comic": 4962, "ghted": 4963, "rugby": 4964, "burgh": 4965, "gging": 4966, "testing": 4967, "prepar": 4968, "laugh": 4969, "aled": 4970, "improve": 4971, "believ": 4972, "advice": 4973, "shares": 4974, "heart": 4975, "turning": 4976, "sb": 4977, "tel": 4978, "cafe": 4979, "nes": 4980, "daniel": 4981, "patter": 4982, "tz": 4983, "sett": 4984, "park": 4985, "cand": 4986, "stick": 4987, "happens": 4988, "brian": 4989, "newest": 4990, "epic": 4991, "ador": 4992, "kies": 4993, "warning": 
4994, "animals": 4995, "custom": 4996, "arc": 4997, "dian": 4998, "gold": 4999, "core": 5000, "tf": 5001, "city": 5002, "pants": 5003, "reality": 5004, "confi": 5005, "inju": 5006, "fox": 5007, "guil": 5008, "knew": 5009, "âĺº": 5010, "correc": 5011, "itude": 5012, "dden": 5013, ".#": 5014, "reduc": 5015, "pass": 5016, "fon": 5017, "ya": 5018, "owner": 5019, "returns": 5020, "nc": 5021, "east": 5022, "apol": 5023, "insur": 5024, "tho": 5025, "sim": 5026, "junior": 5027, "bee": 5028, "angel": 5029, "attle": 5030, "electric": 5031, "horror": 5032, "crash": 5033, "eye": 5034, "path": 5035, "southern": 5036, "employe": 5037, "geo": 5038, "tan": 5039, "haz": 5040, "rally": 5041, "ðŁı»": 5042, "property": 5043, "wasn": 5044, "enjoyed": 5045, "grey": 5046, "gas": 5047, "brew": 5048, "northern": 5049, "holding": 5050, "gp": 5051, "take": 5052, "chart": 5053, "lyn": 5054, "drama": 5055, "zo": 5056, "paid": 5057, "throwback": 5058, "cup": 5059, "discussion": 5060, "downtown": 5061, "will": 5062, "lew": 5063, "bis": 5064, "tary": 5065, "bread": 5066, "upon": 5067, "rate": 5068, "teachers": 5069, "itation": 5070, "anced": 5071, "cycle": 5072, "choose": 5073, "dc": 5074, "iran": 5075, "cow": 5076, "dave": 5077, "raise": 5078, "princess": 5079, "faith": 5080, "->": 5081, "industri": 5082, "spain": 5083, "guitar": 5084, "facts": 5085, "mn": 5086, "spen": 5087, "courte": 5088, "gott": 5089, "projects": 5090, "audi": 5091, "osc": 5092, "peter": 5093, "sand": 5094, "interest": 5095, "happiness": 5096, "venue": 5097, "soldi": 5098, "surprise": 5099, "potential": 5100, "perio": 5101, "customer": 5102, "ii": 5103, "gni": 5104, "manufac": 5105, "eco": 5106, "broken": 5107, "singer": 5108, "vels": 5109, "wales": 5110, "hus": 5111, "inj": 5112, "four": 5113, "talent": 5114, "dying": 5115, "matthe": 5116, "film": 5117, "joining": 5118, "sell": 5119, "jar": 5120, "lmao": 5121, "surger": 5122, "bbc": 5123, "sources": 5124, "austin": 5125, "nik": 5126, "charles": 5127, "fam": 5128, "princi": 
5129, "angel": 5130, "cash": 5131, "lot": 5132, "ored": 5133, "plays": 5134, "plate": 5135, "done": 5136, "memory": 5137, "brings": 5138, "nba": 5139, "solutions": 5140, "teaching": 5141, "grace": 5142, "circu": 5143, "helps": 5144, "founder": 5145, "mary": 5146, "explore": 5147, "decor": 5148, "parts": 5149, "cho": 5150, "integr": 5151, "hau": 5152, "ises": 5153, "putting": 5154, "iner": 5155, "rit": 5156, "vy": 5157, "michel": 5158, "blues": 5159, "everyday": 5160, "forms": 5161, "bio": 5162, "year": 5163, "pin": 5164, "tter": 5165, "spring": 5166, "))": 5167, "pot": 5168, "aling": 5169, "performing": 5170, "shan": 5171, "planet": 5172, "musical": 5173, "heads": 5174, "italian": 5175, "strugg": 5176, "âĢįâĻ": 5177, "wings": 5178, "pump": 5179, "hh": 5180, "trou": 5181, "aid": 5182, "prime": 5183, "earth": 5184, "paint": 5185, "mont": 5186, "amy": 5187, "bbc": 5188, "fabulous": 5189, "fruit": 5190, "android": 5191, "bourne": 5192, "ceremony": 5193, "ential": 5194, "??": 5195, "debate": 5196, "oning": 5197, "draft": 5198, "solar": 5199, "tx": 5200, "jam": 5201, "corn": 5202, "!!!!!": 5203, "broo": 5204, "milk": 5205, "posed": 5206, "ohi": 5207, "movement": 5208, "bren": 5209, "partner": 5210, "pg": 5211, "ette": 5212, "aries": 5213, "shout": 5214, "ng": 5215, "leaving": 5216, "tells": 5217, "sens": 5218, "taste": 5219, "kelly": 5220, "worl": 5221, "gym": 5222, "rich": 5223, "egy": 5224, "pid": 5225, "mas": 5226, "âĤ": 5227, "courtesy": 5228, "frank": 5229, "increase": 5230, "written": 5231, "ppers": 5232, "rel": 5233, "hai": 5234, "sas": 5235, "sound": 5236, "tti": 5237, "wich": 5238, "river": 5239, "...\"": 5240, "ag": 5241, "fellow": 5242, "rome": 5243, "small": 5244, "gency": 5245, "ican": 5246, "luxury": 5247, "proof": 5248, "met": 5249, "wildlife": 5250, "moments": 5251, "rather": 5252, "corner": 5253, "compe": 5254, "canadian": 5255, "likely": 5256, "therapy": 5257, "liam": 5258, "economic": 5259, "indie": 5260, "route": 5261, "fight": 5262, "hope": 5263, 
"setting": 5264, "antly": 5265, "cross": 5266, "fantasy": 5267, "dee": 5268, "sketch": 5269, "compli": 5270, "ymi": 5271, "rules": 5272, "engineering": 5273, "figure": 5274, "row": 5275, ".,": 5276, "fw": 5277, "sydney": 5278, "wou": 5279, "tation": 5280, "drew": 5281, "uses": 5282, "there": 5283, "spread": 5284, "structure": 5285, "patrick": 5286, "apparently": 5287, "ros": 5288, "hills": 5289, "wwe": 5290, "anny": 5291, "commission": 5292, "div": 5293, "fying": 5294, "consul": 5295, "analysis": 5296, "exi": 5297, "tennis": 5298, "vehicle": 5299, "ðŁĺŃðŁĺŃ": 5300, "ass": 5301, "highly": 5302, "opened": 5303, "bann": 5304, "ðŁēĻ": 5305, "mph": 5306, "wishing": 5307, "vor": 5308, "fif": 5309, "giveaway": 5310, "rr": 5311, "ray": 5312, "jess": 5313, "gat": 5314, "icymi": 5315, "xit": 5316, "highest": 5317, "york": 5318, "pie": 5319, "involved": 5320, "higher": 5321, "rie": 5322, "malay": 5323, "intelli": 5324, "despite": 5325, "chee": 5326, "sarah": 5327, "bean": 5328, "recogni": 5329, "arsen": 5330, "talented": 5331, "passion": 5332, "ich": 5333, "abc": 5334, "leads": 5335, "disease": 5336, "vis": 5337, "sec": 5338, "presenting": 5339, "milli": 5340, "hole": 5341, "shots": 5342, "depart": 5343, "surgery": 5344, "govt": 5345, "bin": 5346, "dual": 5347, "evi": 5348, "longer": 5349, "evol": 5350, "screen": 5351, "portrait": 5352, "etc": 5353, "lose": 5354, "chat": 5355, "pen": 5356, "pi": 5357, "oma": 5358, "sick": 5359, "erc": 5360, "companies": 5361, "entry": 5362, "plane": 5363, "gry": 5364, "vene": 5365, "liverpool": 5366, "premiere": 5367, "shared": 5368, "ared": 5369, "films": 5370, "ira": 5371, "holidays": 5372, "cricket": 5373, "ician": 5374, "ving": 5375, ".)": 5376, "ultimate": 5377, "division": 5378, "conduc": 5379, "sept": 5380, "forces": 5381, "mont": 5382, "smart": 5383, "disapp": 5384, "sunshine": 5385, "ind": 5386, "bless": 5387, "made": 5388, "colors": 5389, "frank": 5390, "iron": 5391, "bottle": 5392, "sgo": 5393, "mood": 5394, "jason": 5395, "eric": 
5396, "birth": 5397, "teen": 5398, "response": 5399, "target": 5400, "statement": 5401, "fear": 5402, "thel": 5403, "alum": 5404, "arab": 5405, "blin": 5406, "direction": 5407, "steps": 5408, "erial": 5409, "worked": 5410, "atl": 5411, "ðŁēķ": 5412, "felt": 5413, "poli": 5414, "scenes": 5415, "homes": 5416, "bell": 5417, "eat": 5418, "ateful": 5419, "tin": 5420, "lace": 5421, "folks": 5422, "pse": 5423, "ann": 5424, "wisdom": 5425, "fav": 5426, "butter": 5427, "sr": 5428, "areas": 5429, "smoo": 5430, "biz": 5431, "dges": 5432, "appo": 5433, "more": 5434, "them": 5435, "effect": 5436, "windows": 5437, "sunny": 5438, "capital": 5439, "totally": 5440, "cities": 5441, "grant": 5442, "mbers": 5443, "slow": 5444, "autu": 5445, "ilities": 5446, "wro": 5447, "rising": 5448, "stics": 5449, "violence": 5450, "igh": 5451, "quot": 5452, "hit": 5453, "tc": 5454, "heritage": 5455, "buff": 5456, "nes": 5457, "zar": 5458, "dential": 5459, "exac": 5460, "edge": 5461, "deep": 5462, "arena": 5463, "became": 5464, "benefits": 5465, "marks": 5466, "mber": 5467, "az": 5468, "ames": 5469, "preci": 5470, "dragon": 5471, "reg": 5472, "dings": 5473, "dos": 5474, "ðŁēª": 5475, "nel": 5476, "sity": 5477, "meal": 5478, "dist": 5479, "legend": 5480, "purchase": 5481, "pical": 5482, "stick": 5483, "fat": 5484, "duba": 5485, "profess": 5486, "carto": 5487, "prof": 5488, "countries": 5489, "responsi": 5490, "sequ": 5491, "fab": 5492, "tribute": 5493, "honored": 5494, "practic": 5495, "purple": 5496, "anton": 5497, "pared": 5498, "tough": 5499, "summer": 5500, "environment": 5501, "sons": 5502, "ðŁĻı": 5503, "mps": 5504, "gies": 5505, "heroes": 5506, "telling": 5507, "henry": 5508, "fen": 5509, "knowledge": 5510, "Ģï¸ı": 5511, "fr": 5512, "neg": 5513, "ure": 5514, "acking": 5515, "hearts": 5516, "soo": 5517, "hollywood": 5518, "jump": 5519, "sauce": 5520, "schedule": 5521, "turn": 5522, "yoga": 5523, "creating": 5524, "cket": 5525, "creek": 5526, "âŃ": 5527, "customers": 5528, "madri": 5529, "gul": 
5530, "assemb": 5531, "mount": 5532, "cell": 5533, "top": 5534, "stal": 5535, "davis": 5536, "twi": 5537, "sign": 5538, "premier": 5539, "itions": 5540, "hearing": 5541, "unk": 5542, "patients": 5543, "appear": 5544, "heaven": 5545, "alty": 5546, "doctor": 5547, "ae": 5548, "platform": 5549, "jeff": 5550, "ðŁĵ·": 5551, "regional": 5552, "bid": 5553, "boxing": 5554, "exten": 5555, "ority": 5556, "aw": 5557, "wise": 5558, "ille": 5559, "several": 5560, "bie": 5561, "situ": 5562, "syria": 5563, "âľħ": 5564, "reminder": 5565, "entertain": 5566, "lion": 5567, "partners": 5568, "inn": 5569, "phar": 5570, "fau": 5571, "pls": 5572, "expected": 5573, "sugar": 5574, "decision": 5575, "sb": 5576, "chron": 5577, "association": 5578, "leaves": 5579, "visited": 5580, "shap": 5581, "ðŁēĸ": 5582, "further": 5583, "hann": 5584, "wi": 5585, "runs": 5586, "ler": 5587, "funding": 5588, "filled": 5589, "......": 5590, "tiny": 5591, "hang": 5592, "org": 5593, "cool": 5594, "semin": 5595, "ðŁıĨ": 5596, "spons": 5597, "navy": 5598, "saint": 5599, "drug": 5600, "dal": 5601, "roun": 5602, "covered": 5603, "traditional": 5604, "investment": 5605, "dete": 5606, "alism": 5607, "flow": 5608, "nis": 5609, "sunrise": 5610, "feat": 5611, "fted": 5612, "weird": 5613, "jere": 5614, "vegan": 5615, "medicine": 5616, "ano": 5617, "accu": 5618, "delivery": 5619, "temple": 5620, "changing": 5621, "wilson": 5622, "philipp": 5623, "refe": 5624, "nd": 5625, "iser": 5626, "gay": 5627, "rand": 5628, "atives": 5629, "tely": 5630, "pand": 5631, "intellig": 5632, "gare": 5633, "ambas": 5634, "demon": 5635, "committee": 5636, "strategy": 5637, "refuge": 5638, "budget": 5639, "protec": 5640, "pier": 5641, "express": 5642, "nomin": 5643, "economy": 5644, "allow": 5645, "icon": 5646, "galax": 5647, "oh": 5648, "indivi": 5649, "demand": 5650, "virgin": 5651, "luke": 5652, "alists": 5653, "mani": 5654, "smi": 5655, "judge": 5656, "enty": 5657, "michi": 5658, "result": 5659, "amed": 5660, "speaks": 5661, "',": 5662, 
"houston": 5663, "shin": 5664, "bing": 5665, "fly": 5666, "chem": 5667, "auto": 5668, "vas": 5669, "get": 5670, "arm": 5671, "thanks": 5672, "din": 5673, "gang": 5674, "xx": 5675, "sion": 5676, "located": 5677, "pl": 5678, "josh": 5679, "info": 5680, "joins": 5681, "adverti": 5682, "otd": 5683, "eld": 5684, "sie": 5685, "reasons": 5686, "vent": 5687, "ðŁĩºðŁĩ¸": 5688, "âł": 5689, "conversation": 5690, "studi": 5691, "ðŁĶ„ðŁĶ„": 5692, "gos": 5693, "sounds": 5694, "unit": 5695, "musc": 5696, "gel": 5697, "acked": 5698, "paci": 5699, "cos": 5700, "dere": 5701, "uu": 5702, "ao": 5703, "lam": 5704, "inspiring": 5705, "arms": 5706, "tware": 5707, "matters": 5708, "addic": 5709, "dude": 5710, "ext": 5711, "crisis": 5712, "bath": 5713, "meet": 5714, "singh": 5715, "expect": 5716, "delhi": 5717, "rescue": 5718, "worst": 5719, "aug": 5720, "shipping": 5721, "serving": 5722, "sto": 5723, "dark": 5724, "aces": 5725, "historic": 5726, "landscape": 5727, "designer": 5728, "billion": 5729, "grateful": 5730, "wake": 5731, "eve": 5732, "miller": 5733, "housing": 5734, "dynam": 5735, "isco": 5736, "beha": 5737, "shop": 5738, "prou": 5739, "eas": 5740, "asia": 5741, "eding": 5742, "kon": 5743, "department": 5744, "awar": 5745, "marine": 5746, "inci": 5747, "photographer": 5748, "tape": 5749, "logo": 5750, "rings": 5751, "dit": 5752, "----": 5753, "vinyl": 5754, "wc": 5755, "voting": 5756, "seven": 5757, "ambassad": 5758, "dallas": 5759, "tu": 5760, "comment": 5761, "kra": 5762, "bles": 5763, "wag": 5764, "ud": 5765, "audio": 5766, "strike": 5767, "official": 5768, "ots": 5769, "metho": 5770, "tools": 5771, "radi": 5772, "alan": 5773, "hunt": 5774, "watched": 5775, "ake": 5776, "fake": 5777, "drinking": 5778, "merry": 5779, "ml": 5780, "bday": 5781, "rio": 5782, "nike": 5783, "cant": 5784, "repe": 5785, "costu": 5786, "murder": 5787, "akers": 5788, "chers": 5789, "outs": 5790, "beginning": 5791, "sos": 5792, "ades": 5793, "nin": 5794, "notes": 5795, "wrote": 5796, "solo": 5797, "ci": 
5798, "lighting": 5799, "urban": 5800, "brexit": 5801, "attend": 5802, "shirts": 5803, "playo": 5804, "actress": 5805, "plic": 5806, "standard": 5807, "quotes": 5808, "parade": 5809, "ancient": 5810, "©": 5811, "turing": 5812, "ree": 5813, "primary": 5814, "flash": 5815, "citiz": 5816, "mates": 5817, "stein": 5818, "zi": 5819, "clinton": 5820, "skin": 5821, "gene": 5822, "hum": 5823, "gar": 5824, "tle": 5825, "yi": 5826, "focu": 5827, "dean": 5828, "plants": 5829, "cyber": 5830, "bu": 5831, "ome": 5832, "hop": 5833, "address": 5834, "tix": 5835, "gifts": 5836, "relationship": 5837, "subscri": 5838, "feed": 5839, "exactly": 5840, "hawks": 5841, "exo": 5842, "stress": 5843, "sn": 5844, "arrested": 5845, "ane": 5846, "software": 5847, "zero": 5848, "theme": 5849, "mumb": 5850, "immigr": 5851, "mia": 5852, "makeup": 5853, "pleasure": 5854, "univers": 5855, "harb": 5856, "engine": 5857, "aper": 5858, "rin": 5859, "bra": 5860, "institute": 5861, "leather": 5862, "alth": 5863, "singing": 5864, "cos": 5865, "ghty": 5866, "meas": 5867, "stic": 5868, "side": 5869, "insurance": 5870, "cot": 5871, "pitch": 5872, "mountains": 5873, "crimin": 5874, "supre": 5875, "valentine": 5876, "ater": 5877, "wouldn": 5878, "scale": 5879, "related": 5880, "regar": 5881, "startup": 5882, "packed": 5883, "mike": 5884, "weekly": 5885, "pts": 5886, "count": 5887, "har": 5888, "gotten": 5889, "mind": 5890, "berlin": 5891, "conditions": 5892, "switch": 5893, "corn": 5894, "save": 5895, "gli": 5896, "emergency": 5897, "tuned": 5898, "stock": 5899, "discussing": 5900, "everybody": 5901, "sday": 5902, "whether": 5903, "wrestling": 5904, "eces": 5905, "gender": 5906, "chen": 5907, "ðŁijĢ": 5908, "madrid": 5909, "marathon": 5910, "egg": 5911, "ier": 5912, "thx": 5913, "asking": 5914, "korea": 5915, "wolf": 5916, "aya": 5917, "gm": 5918, "gau": 5919, "atory": 5920, "vr": 5921, "grass": 5922, "killing": 5923, "bble": 5924, "uro": 5925, "uni": 5926, "eth": 5927, "shore": 5928, "then": 5929, "reale": 5930, 
"bottom": 5931, "exerc": 5932, "kar": 5933, "ories": 5934, "adri": 5935, "sands": 5936, "sex": 5937, ".'": 5938, "volunteers": 5939, "perform": 5940, "parliam": 5941, "include": 5942, "delighted": 5943, "executive": 5944, "fuel": 5945, "kiss": 5946, "ãħ": 5947, "charge": 5948, "hu": 5949, "cakes": 5950, "vet": 5951, "glu": 5952, "agree": 5953, "prices": 5954, "nau": 5955, "hl": 5956, "gru": 5957, "raj": 5958, "strength": 5959, "bic": 5960, "spending": 5961, "ales": 5962, "aven": 5963, "blast": 5964, ":(": 5965, "yof": 5966, "normal": 5967, "six": 5968, "quick": 5969, "sea": 5970, "daw": 5971, "meets": 5972, "lovers": 5973, "updated": 5974, "potat": 5975, "completed": 5976, "cook": 5977, "opportunities": 5978, "pure": 5979, "organic": 5980, "temper": 5981, "cam": 5982, "avoid": 5983, "parking": 5984, "dubai": 5985, "ando": 5986, "distri": 5987, "toy": 5988, "completely": 5989, "donald": 5990, "trial": 5991, "bass": 5992, "boun": 5993, "background": 5994, "vas": 5995, "marvel": 5996, "lum": 5997, "rus": 5998, "tool": 5999, "commissi": 6000, "throwback": 6001, "finding": 6002, "islam": 6003, "!?": 6004, "stop": 6005, "evil": 6006, "oral": 6007, "residents": 6008, "identi": 6009, "oak": 6010, "ðŁİ¶": 6011, "lil": 6012, "spanish": 6013, "chapter": 6014, "stopped": 6015, "direct": 6016, "hosted": 6017, "picked": 6018, "labour": 6019, "lewis": 6020, "defense": 6021, "Ć Ā®": 6022, "healthcare": 6023, "whis": 6024, "math": 6025, "peak": 6026, "raised": 6027, "fix": 6028, "bull": 6029, "thir": 6030, "chelsea": 6031, "folk": 6032, "tre": 6033, "candi": 6034, "paul": 6035, "either": 6036, "adam": 6037, "poetry": 6038, "jewelry": 6039, "ð٦": 6040, "pray": 6041, "ا": 6042, "gc": 6043, "oz": 6044, "wishes": 6045, "foreign": 6046, "sung": 6047, "learned": 6048, "ene": 6049, "ning": 6050, "michael": 6051, "illustration": 6052, "legendary": 6053, "wav": 6054, "bau": 6055, "ðŁļ¨": 6056, "calend": 6057, "streets": 6058, "âĨ": 6059, "monster": 6060, "buck": 6061, "gr": 6062, "school": 
6063, "bath": 6064, "waste": 6065, "neck": 6066, "hawa": 6067, "beach": 6068, "replac": 6069, "ject": 6070, "oner": 6071, "factory": 6072, "count": 6073, "ðŁĵ¸": 6074, "morgan": 6075, "dering": 6076, "sean": 6077, "stephen": 6078, "dep": 6079, "novel": 6080, "videos": 6081, "ical": 6082, "pressure": 6083, "arsenal": 6084, "expre": 6085, "irs": 6086, "trending": 6087, "ssa": 6088, "flash": 6089, "resear": 6090, "through": 6091, "professor": 6092, "sculp": 6093, "tos": 6094, "gged": 6095, "mma": 6096, "bee": 6097, "ape": 6098, "hunter": 6099, "ami": 6100, "hei": 6101, "plastic": 6102, "bucks": 6103, "universe": 6104, "legen": 6105, "nigeria": 6106, "pleased": 6107, "ris": 6108, "thinks": 6109, "autumn": 6110, "ids": 6111, "dis": 6112, "anthony": 6113, "ðŁı½": 6114, "aked": 6115, "glasses": 6116, "finance": 6117, "zer": 6118, "kas": 6119, "contract": 6120, "numbers": 6121, "shaw": 6122, "partnership": 6123, "til": 6124, "launched": 6125, "sal": 6126, "victoria": 6127, "theater": 6128, "usual": 6129, "names": 6130, "period": 6131, "eliza": 6132, "ith": 6133, "barcel": 6134, "rocks": 6135, "bags": 6136, "mate": 6137, "distribu": 6138, "jon": 6139, "diffic": 6140, "alized": 6141, "curren": 6142, "scored": 6143, "bha": 6144, "dublin": 6145, "rose": 6146, "inted": 6147, "solid": 6148, "behavi": 6149, "walker": 6150, "simply": 6151, "gardens": 6152, "headed": 6153, "ini": 6154, "ohio": 6155, "weap": 6156, "fo": 6157, "glen": 6158, "estate": 6159, "random": 6160, "thunder": 6161, "thru": 6162, "kill": 6163, "jacket": 6164, "iti": 6165, "entertainment": 6166, "thanksgiving": 6167, "ental": 6168, "encoura": 6169, "elo": 6170, "ather": 6171, "tank": 6172, "highlights": 6173, "fting": 6174, "rule": 6175, "models": 6176, "border": 6177, "bjp": 6178, "husband": 6179, "indone": 6180, "kenya": 6181, "bears": 6182, "alo": 6183, "ninten": 6184, "pix": 6185, "stro": 6186, "orders": 6187, "salad": 6188, "roads": 6189, "nor": 6190, "lation": 6191, "sophi": 6192, "ðŁı¼": 6193, "pieces": 
6194, "bone": 6195, "mins": 6196, "includes": 6197, "nutr": 6198, "phil": 6199, "sent": 6200, "fundra": 6201, "gain": 6202, "borough": 6203, "nad": 6204, "monday": 6205, "activity": 6206, "items": 6207, "becoming": 6208, "kenne": 6209, "detro": 6210, "cardi": 6211, "guests": 6212, "ux": 6213, "worldwide": 6214, "severe": 6215, "news": 6216, "thankful": 6217, "fiction": 6218, "vege": 6219, "mall": 6220, "sian": 6221, "eral": 6222, "injury": 6223, "lee": 6224, "menu": 6225, "dancing": 6226, "scotti": 6227, "example": 6228, "(#": 6229, "nai": 6230, "studios": 6231, "bai": 6232, "ðŁēĽ": 6233, "jav": 6234, "diamond": 6235, "vince": 6236, "rick": 6237, "protection": 6238, "lincol": 6239, "champs": 6240, "approach": 6241, "dar": 6242, "mile": 6243, "clouds": 6244, "jeff": 6245, "infin": 6246, "lers": 6247, "ples": 6248, "peace": 6249, "gop": 6250, "âĻ”": 6251, "techn": 6252, "stra": 6253, "average": 6254, "effort": 6255, "introducing": 6256, "diversity": 6257, "australian": 6258, "amp": 6259, "boost": 6260, "ske": 6261, "patient": 6262, "appreciate": 6263, "icians": 6264, "pur": 6265, "fell": 6266, "woods": 6267, "illustr": 6268, "ðŁĸ": 6269, "agency": 6270, "actions": 6271, "britain": 6272, "underway": 6273, "seattle": 6274, "eland": 6275, "ago": 6276, "fill": 6277, "streaming": 6278, "protest": 6279, "challenges": 6280, "kyo": 6281, "etsy": 6282, "cooking": 6283, "expert": 6284, "russ": 6285, "rainbow": 6286, "commercial": 6287, "spin": 6288, "beats": 6289, "cry": 6290, "valu": 6291, "eli": 6292, "throw": 6293, "grams": 6294, "levels": 6295, "michigan": 6296, "cad": 6297, "adorable": 6298, "constitu": 6299, "ws": 6300, "pub": 6301, "midnight": 6302, "that": 6303, "netfli": 6304, "brazil": 6305, "diego": 6306, "regular": 6307, "joy": 6308, "âĤ¬": 6309, "liqu": 6310, "eastern": 6311, "kni": 6312, "flat": 6313, "np": 6314, "brown": 6315, "wer": 6316, "sey": 6317, "tters": 6318, "acting": 6319, "vanc": 6320, "cycling": 6321, "programme": 6322, "raw": 6323, "complex": 6324, 
"tattoo": 6325, "throwbackthursday": 6326, "sessions": 6327, "rooms": 6328, "sight": 6329, "species": 6330, "bomb": 6331, "laugh": 6332, "keeps": 6333, "moon": 6334, "officers": 6335, "conver": 6336, "tr": 6337, "hash": 6338, "tack": 6339, "rious": 6340, "adap": 6341, "aj": 6342, "recogn": 6343, "expo": 6344, "sugge": 6345, "confirmed": 6346, "rolling": 6347, "dressing": 6348, "ict": 6349, "friday": 6350, "phones": 6351, "ridge": 6352, "concept": 6353, "roy": 6354, "keys": 6355, "effor": 6356, "cate": 6357, "kne": 6358, "even": 6359, "lay": 6360, "communities": 6361, "mod": 6362, "naz": 6363, "everywhere": 6364, "alab": 6365, "bitcoin": 6366, "banks": 6367, "outdoor": 6368, "federal": 6369, "stores": 6370, "hp": 6371, "cal": 6372, "mely": 6373, "signific": 6374, "bear": 6375, "republic": 6376, "closer": 6377, "allah": 6378, "pick": 6379, "xd": 6380, "palace": 6381, "chill": 6382, "bam": 6383, "erous": 6384, "una": 6385, "allen": 6386, "outstanding": 6387, "olympic": 6388, "supply": 6389, "figu": 6390, "vau": 6391, "lp": 6392, "charlie": 6393, "unes": 6394, ">>>": 6395, "legends": 6396, "icial": 6397, "coast": 6398, "benefit": 6399, "multi": 6400, "fits": 6401, "farmers": 6402, "amount": 6403, "sisters": 6404, "harve": 6405, "honey": 6406, "queen": 6407, "bers": 6408, "plann": 6409, "âŃIJ": 6410, "mu": 6411, "barcelona": 6412, "alber": 6413, "status": 6414, "remain": 6415, "extra": 6416, "candy": 6417, "vious": 6418, "âľĮ": 6419, "ov": 6420, "warriors": 6421, "-->": 6422, "jump": 6423, "amar": 6424, "xmas": 6425, "studies": 6426, "iors": 6427, "kor": 6428, "donate": 6429, "prep": 6430, "fish": 6431, "ima": 6432, "painted": 6433, "admini": 6434, "cosplay": 6435, "sports": 6436, "drops": 6437, "fighter": 6438, "evidence": 6439, "ðŁēª": 6440, "lake": 6441, "rob": 6442, "cinema": 6443, "profile": 6444, "ñ": 6445, "stands": 6446, "legacy": 6447, "shape": 6448, "roof": 6449, "civil": 6450, "ians": 6451, "syl": 6452, "sham": 6453, "voted": 6454, "retail": 6455, "philli": 
6456, "listed": 6457, "duty": 6458, "nb": 6459, "thes": 6460, "fare": 6461, "auction": 6462, "fficial": 6463, "storms": 6464, "dp": 6465, "loun": 6466, "shops": 6467, "aly": 6468, "anime": 6469, "multiple": 6470, "ðŁĺįðŁĺį": 6471, "psycho": 6472, "jean": 6473, "apart": 6474, "candidate": 6475, "ggy": 6476, "conf": 6477, "joseph": 6478, "wick": 6479, "meat": 6480, "frame": 6481, "cl": 6482, "forgot": 6483, "phy": 6484, "fing": 6485, "lied": 6486, "rep": 6487, "seed": 6488, "fall": 6489, "ufc": 6490, "nut": 6491, "lind": 6492, "mode": 6493, "fields": 6494, "ence": 6495, "sley": 6496, "ð٤Ķ": 6497, "chill": 6498, "followed": 6499, "announces": 6500, "corru": 6501, "trophy": 6502, "themselves": 6503, "acle": 6504, "aldu": 6505, "kong": 6506, "lon": 6507, "sv": 6508, "broke": 6509, "anderson": 6510, "tai": 6511, "story": 6512, "temporary": 6513, "activities": 6514, "kati": 6515, "ariz": 6516, "crystal": 6517, "spoke": 6518, "extremely": 6519, "trading": 6520, "ðŁēļ": 6521, "ü": 6522, "inch": 6523, "edin": 6524, "outfit": 6525, "equip": 6526, "madi": 6527, "formed": 6528, "beef": 6529, "pop": 6530, "tiger": 6531, "thisday": 6532, "tired": 6533, "neighb": 6534, "retro": 6535, "isa": 6536, "unt": 6537, "tas": 6538, "kansas": 6539, "dest": 6540, "seconds": 6541, "tay": 6542, "hurric": 6543, "ou": 6544, "galaxy": 6545, "daddy": 6546, "brow": 6547, "burger": 6548, "enced": 6549, "desk": 6550, "accur": 6551, "secretary": 6552, "elite": 6553, "kab": 6554, "chin": 6555, "tourism": 6556, "buddy": 6557, "icide": 6558, "dressed": 6559, "ud": 6560, "vacation": 6561, "cheers": 6562, "comfor": 6563, "characters": 6564, "jet": 6565, "buying": 6566, "lins": 6567, "nap": 6568, "realestate": 6569, "lie": 6570, "afc": 6571, "iii": 6572, "fame": 6573, "nr": 6574, "bat": 6575, "agent": 6576, "makers": 6577, "â̼": 6578, "sector": 6579, "opti": 6580, "leon": 6581, "diet": 6582, "prayer": 6583, "hip": 6584, "mir": 6585, "lex": 6586, "bry": 6587, "ana": 6588, "passing": 6589, "wen": 6590, 
"recovery": 6591, "aki": 6592, "popul": 6593, "resort": 6594, "maria": 6595, "stuck": 6596, "reads": 6597, "tier": 6598, "perfec": 6599, "netflix": 6600, "poo": 6601, "champ": 6602, "oc": 6603, "reduce": 6604, "wered": 6605, "comments": 6606, "claim": 6607, "accident": 6608, "sag": 6609, "hack": 6610, "salt": 6611, "kinda": 6612, "killer": 6613, "ios": 6614, "zy": 6615, "exchange": 6616, "lecture": 6617, "enger": 6618, "icking": 6619, "tau": 6620, "reveals": 6621, "prison": 6622, "zom": 6623, "ghan": 6624, "ul": 6625, "journal": 6626, "iot": 6627, "trin": 6628, "jona": 6629, "governor": 6630, "cape": 6631, "quarter": 6632, "spective": 6633, "impressive": 6634, "babies": 6635, "tx": 6636, "mill": 6637, "oy": 6638, "harri": 6639, "joint": 6640, "sue": 6641, "collaboration": 6642, "trend": 6643, "revolution": 6644, "renew": 6645, "alumni": 6646, "gett": 6647, "shell": 6648, "sunday": 6649, "entu": 6650, "nic": 6651, "donaldtrump": 6652, "blockchain": 6653, "pacific": 6654, "explains": 6655, "spy": 6656, "advoc": 6657, "paradi": 6658, "tof": 6659, "starring": 6660, "pav": 6661, "feed": 6662, "brac": 6663, "smoke": 6664, "hamp": 6665, "yam": 6666, "tokyo": 6667, "simon": 6668, "dh": 6669, "effici": 6670, "physical": 6671, "nj": 6672, "elli": 6673, "slow": 6674, "graduate": 6675, "americans": 6676, "tify": 6677, "fred": 6678, "apore": 6679, "finds": 6680, "robin": 6681, "wet": 6682, "notice": 6683, "semi": 6684, "unve": 6685, "kom": 6686, "pilot": 6687, "screening": 6688, "daily": 6689, "ðŁēĹ": 6690, "royal": 6691, "spa": 6692, "votes": 6693, "nag": 6694, "whate": 6695, "attending": 6696, "experim": 6697, "addition": 6698, "kate": 6699, "stol": 6700, "mali": 6701, "foot": 6702, "christ": 6703, "chan": 6704, "dee": 6705, "licen": 6706, "global": 6707, "moore": 6708, "tia": 6709, "brigh": 6710, "mystery": 6711, "yay": 6712, "âĿ¤ï¸ıâĿ¤ï¸ı": 6713, "creati": 6714, "mechan": 6715, "clock": 6716, "dic": 6717, "âĢĶ": 6718, "pper": 6719, "alph": 6720, "throughout": 6721, "allow": 
6722, "resources": 6723, "selection": 6724, "hamil": 6725, "bbq": 6726, "aaaa": 6727, "virginia": 6728, "disney": 6729, "eng": 6730, "sored": 6731, "drinks": 6732, "fancy": 6733, "consider": 6734, "enda": 6735, "jane": 6736, "handmade": 6737, "dul": 6738, "ontari": 6739, "ius": 6740, "sville": 6741, "colorado": 6742, "whatever": 6743, "wheel": 6744, "promise": 6745, "never": 6746, "designs": 6747, "ably": 6748, "sexual": 6749, "vancou": 6750, "ati": 6751, "convention": 6752, "cultural": 6753, "singapore": 6754, "promo": 6755, "loaded": 6756, "glasgo": 6757, "ppl": 6758, "noo": 6759, "kee": 6760, "stem": 6761, "mention": 6762, "ido": 6763, "cruise": 6764, "riding": 6765, "becomes": 6766, "bey": 6767, "âļ½ï¸ı": 6768, "twin": 6769, "dedicated": 6770, "nash": 6771, "desi": 6772, "workout": 6773, "jenni": 6774, "iv": 6775, "groups": 6776, "relax": 6777, "phoeni": 6778, "lift": 6779, "mixed": 6780, "mck": 6781, "pc": 6782, "must": 6783, "metro": 6784, "cies": 6785, "yar": 6786, "aim": 6787, "anger": 6788, "ie": 6789, "recy": 6790, "married": 6791, "dropped": 6792, "engag": 6793, "lest": 6794, "ambassador": 6795, "oph": 6796, "des": 6797, "wick": 6798, "assistant": 6799, "natur": 6800, "fail": 6801, "ltd": 6802, "short": 6803, "kap": 6804, "shaw": 6805, "bigger": 6806, "remains": 6807, "critical": 6808, "survey": 6809, "coverage": 6810, "erson": 6811, "wind": 6812, "nb": 6813, "billy": 6814, "letes": 6815, "acts": 6816, "jimmy": 6817, "atlan": 6818, "aland": 6819, "tc": 6820, "importance": 6821, "damage": 6822, "fg": 6823, "storage": 6824, "twt": 6825, "bond": 6826, "balance": 6827, "crying": 6828, "puppy": 6829, "vote": 6830, "push": 6831, "ðŁēľ": 6832, "poly": 6833, "mel": 6834, "london": 6835, "terrori": 6836, "effective": 6837, "corporate": 6838, "atlanta": 6839, "jaco": 6840, "nasa": 6841, "greek": 6842, "senate": 6843, "ish": 6844, "eva": 6845, "intelligence": 6846, "efforts": 6847, "alco": 6848, "kun": 6849, "hall": 6850, "diag": 6851, "claims": 6852, "first": 
6853, "hb": 6854, "bae": 6855, "vul": 6856, "pull": 6857, "°": 6858, "separ": 6859, "speed": 6860, "victi": 6861, "onthisday": 6862, "audience": 6863, "rates": 6864, "teach": 6865, "filming": 6866, "bush": 6867, "song": 6868, "yum": 6869, "brun": 6870, "raine": 6871, "awa": 6872, "parks": 6873, "ðĿ": 6874, "rabb": 6875, "rach": 6876, "raid": 6877, "reached": 6878, "rail": 6879, "moves": 6880, "selected": 6881, "fri": 6882, "raising": 6883, "omy": 6884, "stones": 6885, "suk": 6886, "francisco": 6887, "cases": 6888, "capit": 6889, "confu": 6890, "wtf": 6891, "poke": 6892, "equipment": 6893, "greg": 6894, "essential": 6895, "offering": 6896, "nex": 6897, "pies": 6898, "bec": 6899, "creation": 6900, "chairman": 6901, "crown": 6902, "wal": 6903, "johnny": 6904, "shift": 6905, "neck": 6906, "bang": 6907, "bird": 6908, "ðŁĺı": 6909, "duck": 6910, "reserve": 6911, "depu": 6912, "masters": 6913, "overall": 6914, "notic": 6915, "juice": 6916, "sneak": 6917, "cheer": 6918, "classes": 6919, "eagles": 6920, "nca": 6921, "carpet": 6922, "civil": 6923, "coaches": 6924, "harris": 6925, "ups": 6926, "balls": 6927, "decor": 6928, "martin": 6929, "ros": 6930, "vice": 6931, "announcement": 6932, "whose": 6933, "tigers": 6934, "stered": 6935, "cts": 6936, "dram": 6937, "steel": 6938, "young": 6939, "install": 6940, "suppo": 6941, "recording": 6942, "deck": 6943, "seats": 6944, "lder": 6945, "angle": 6946, "bot": 6947, "styles": 6948, "elections": 6949, "fortun": 6950, "nab": 6951, "butter": 6952, "arian": 6953, "kash": 6954, "inner": 6955, "oured": 6956, "beast": 6957, "wei": 6958, "iconic": 6959, "experts": 6960, "necess": 6961, "beng": 6962, "james": 6963, "lia": 6964, "greece": 6965, "ðŁĵ·": 6966, "ðŁĺģ": 6967, "goodbye": 6968, "mitch": 6969, "twice": 6970, "mumbai": 6971, "steam": 6972, "rush": 6973, "medal": 6974, "nett": 6975, "fashion": 6976, "tar": 6977, "rs": 6978, "saving": 6979, "ricul": 6980, "lm": 6981, "sleeping": 6982, "brooklyn": 6983, "miss": 6984, "sending": 6985, 
"discovered": 6986, "sphere": 6987, "oftheday": 6988, "kicks": 6989, "missions": 6990, "wright": 6991, "ern": 6992, "ghtly": 6993, "ious": 6994, "melbourne": 6995, "startu": 6996, "moved": 6997, "carry": 6998, "dak": 6999, "agues": 7000, "belgi": 7001, "ema": 7002, "wayne": 7003, "dot": 7004, "erie": 7005, "pel": 7006, "itunes": 7007, "matthew": 7008, "nobody": 7009, "estab": 7010, "calm": 7011, "winds": 7012, "luc": 7013, "prepare": 7014, "trends": 7015, "exercise": 7016, "advant": 7017, "ðŁē¯": 7018, "athletics": 7019, "apps": 7020, "ctions": 7021, "advance": 7022, "launches": 7023, "little": 7024, "realdonaldtrump": 7025, "elizabeth": 7026, "carolina": 7027, "hub": 7028, "hidden": 7029, "nw": 7030, "user": 7031, "poll": 7032, "greater": 7033, "most": 7034, "fed": 7035, "pat": 7036, "lifestyle": 7037, "sati": 7038, "scores": 7039, "marriage": 7040, "lr": 7041, "avenue": 7042, "deserve": 7043, "rif": 7044, "ðŁĹ": 7045, "watch": 7046, "championships": 7047, "gray": 7048, "enni": 7049, "cotton": 7050, "gom": 7051, "where": 7052, "package": 7053, "sum": 7054, "absolu": 7055, "newly": 7056, "foods": 7057, "tyler": 7058, "assembly": 7059, "muslim": 7060, "bank": 7061, "rememb": 7062, "options": 7063, "producer": 7064, "lando": 7065, "funds": 7066, "upper": 7067, "shadow": 7068, "progre": 7069, "cop": 7070, "inge": 7071, "legs": 7072, "detroit": 7073, "hillary": 7074, "jose": 7075, "giants": 7076, "soup": 7077, "sustainable": 7078, "tus": 7079, "clothes": 7080, "rocking": 7081, "nz": 7082, "minne": 7083, "materi": 7084, "bruce": 7085, "eart": 7086, "casting": 7087, "independent": 7088, "thousands": 7089, "tah": 7090, "decl": 7091, "veterans": 7092, "lions": 7093, "wrap": 7094, "â̦": 7095, "dess": 7096, "bling": 7097, "stine": 7098, "eggs": 7099, "oon": 7100, "closing": 7101, "zay": 7102, "att": 7103, "bacon": 7104, "fail": 7105, "arizona": 7106, "depre": 7107, "ghost": 7108, "newsp": 7109, "wers": 7110, "vip": 7111, "liked": 7112, "ident": 7113, "volunteer": 7114, 
"adult": 7115, "pupp": 7116, "circle": 7117, "material": 7118, "degree": 7119, "grown": 7120, "boom": 7121, "calendar": 7122, "sur": 7123, "viewing": 7124, "athletes": 7125, "chand": 7126, "rell": 7127, "asian": 7128, "entr": 7129, "volley": 7130, "victims": 7131, "body": 7132, "mama": 7133, "transfer": 7134, "geek": 7135, "indic": 7136, "saved": 7137, "mai": 7138, "gent": 7139, "its": 7140, "lounge": 7141, "kol": 7142, "theory": 7143, "situation": 7144, "islands": 7145, "arth": 7146, "zoo": 7147, "flood": 7148, "viously": 7149, "showed": 7150, "parliament": 7151, "chev": 7152, "eline": 7153, "attrac": 7154, "abad": 7155, "tail": 7156, "hrs": 7157, "lus": 7158, "portu": 7159, "gory": 7160, "provides": 7161, "toys": 7162, "death": 7163, "infe": 7164, "ance": 7165, "gle": 7166, "liam": 7167, "lover": 7168, "hud": 7169, "dvd": 7170, "revealed": 7171, "gw": 7172, "rement": 7173, "cathe": 7174, "lying": 7175, "radio": 7176, "derby": 7177, "stors": 7178, "chemi": 7179, "hospit": 7180, "⾨": 7181, "':": 7182, "ilove": 7183, "lemon": 7184, "republic": 7185, "sni": 7186, "ness": 7187, "door": 7188, "reaction": 7189, "pregn": 7190, "flav": 7191, "scholar": 7192, "spotify": 7193, "isation": 7194, "visual": 7195, "aware": 7196, "sponsored": 7197, "joke": 7198, "lessons": 7199, "legis": 7200, "lock": 7201, "simil": 7202, "ðŁĺĭ": 7203, "kind": 7204, "lay": 7205, "mah": 7206, "hoping": 7207, "vancouver": 7208, "aser": 7209, "cleaning": 7210, "gala": 7211, "threat": 7212, "lap": 7213, "ache": 7214, "romance": 7215, "expen": 7216, "repost": 7217, "zam": 7218, "epi": 7219, "mirror": 7220, "oak": 7221, "adul": 7222, "batman": 7223, "slu": 7224, "lc": 7225, "viewed": 7226, "reviews": 7227, "dates": 7228, "indonesia": 7229, "activi": 7230, "offen": 7231, "leaf": 7232, "isi": 7233, "agricul": 7234, "costume": 7235, "sites": 7236, "spiritu": 7237, "appearance": 7238, "iry": 7239, "stair": 7240, "application": 7241, "spectac": 7242, "icity": 7243, "skies": 7244, "handle": 7245, "punk": 
7246, "paradise": 7247, "tn": 7248, "deal": 7249, "providing": 7250, "doc": 7251, "receiving": 7252, "brew": 7253, "microsoft": 7254, "ö": 7255, "ferr": 7256, "metro": 7257, "thail": 7258, "yum": 7259, "carter": 7260, "Ô": 7261, "gentle": 7262, "breaks": 7263, "cooper": 7264, "showcase": 7265, "cutting": 7266, "egypt": 7267, "baby": 7268, "seminar": 7269, "glori": 7270, "sson": 7271, "fave": 7272, "rehear": 7273, "lotte": 7274, "lady": 7275, "alas": 7276, "prep": 7277, "delivered": 7278, "nuclear": 7279, "iro": 7280, "engagement": 7281, "atta": 7282, "conven": 7283, "zan": 7284, "glory": 7285, "holds": 7286, "businesses": 7287, "strange": 7288, "sche": 7289, "itself": 7290, "grad": 7291, "markets": 7292, "falling": 7293, "stats": 7294, "geon": 7295, "budd": 7296, "lis": 7297, "sheet": 7298, "thisi": 7299, "colo": 7300, "desert": 7301, "registration": 7302, "ign": 7303, "explain": 7304, "interior": 7305, "laws": 7306, "writers": 7307, "springs": 7308, "kr": 7309, "fried": 7310, "bloom": 7311, "infra": 7312, "ao": 7313, "cred": 7314, "past": 7315, "lineup": 7316, "boo": 7317, "brea": 7318, "boots": 7319, "celebrity": 7320, "attacks": 7321, "brook": 7322, "eves": 7323, "excu": 7324, "cherry": 7325, "oop": 7326, "fascin": 7327, "boyfriend": 7328, "seas": 7329, "nine": 7330, "effects": 7331, "powered": 7332, "kha": 7333, "ðŁĺĢ": 7334, "shout": 7335, "condition": 7336, "ij": 7337, "hero": 7338, "enterpri": 7339, "winter": 7340, "applications": 7341, "shoe": 7342, "gel": 7343, "battle": 7344, "programs": 7345, "wart": 7346, "ðŁē„": 7347, "rap": 7348, "hol": 7349, "dangerous": 7350, "dia": 7351, "counter": 7352, "rics": 7353, "ior": 7354, "knight": 7355, "coat": 7356, "emotional": 7357, "atures": 7358, "das": 7359, "wheel": 7360, "forecast": 7361, "transport": 7362, "glasgow": 7363, "kingdom": 7364, "preparing": 7365, "immedi": 7366, "ffin": 7367, "awarded": 7368, "printing": 7369, "roman": 7370, "fighters": 7371, "anymore": 7372, "belt": 7373, "pine": 7374, "wine": 7375, 
"xi": 7376, "employees": 7377, "logies": 7378, "alled": 7379, "demo": 7380, "birthday": 7381, "angeles": 7382, "log": 7383, "drivers": 7384, "necklace": 7385, "kath": 7386, "sit": 7387, "athlete": 7388, "efs": 7389, "sburg": 7390, "purpose": 7391, "resistance": 7392, "releases": 7393, "tis": 7394, "various": 7395, "deliver": 7396, "chal": 7397, "sanc": 7398, "oppo": 7399, "craw": 7400, "neuro": 7401, "dra": 7402, "supporters": 7403, "snap": 7404, "difficult": 7405, "swear": 7406, "logist": 7407, "path": 7408, "attempt": 7409, "Ć Ā„": 7410, "swimming": 7411, "steve": 7412, "hurt": 7413, "included": 7414, "bap": 7415, "ware": 7416, "ðŁēĭ": 7417, "enders": 7418, "jake": 7419, "leeds": 7420, "climb": 7421, "lb": 7422, "imple": 7423, "lisa": 7424, "clothing": 7425, "ðŁĺİ": 7426, "dt": 7427, "compla": 7428, "swing": 7429, "straw": 7430, "vals": 7431, "kle": 7432, "users": 7433, "storm": 7434, "cuts": 7435, "ontario": 7436, "pan": 7437, "handsome": 7438, "iow": 7439, "argu": 7440, "checking": 7441, "scottish": 7442, "Ķï¸ı": 7443, "sier": 7444, "emma": 7445, "pod": 7446, "pattern": 7447, "desh": 7448, "enh": 7449, "edward": 7450, "ting": 7451, "kh": 7452, "half": 7453, "lincoln": 7454, "mother": 7455, "alleg": 7456, "rc": 7457, "volleyball": 7458, "dn": 7459, "gay": 7460, "ally": 7461, "leton": 7462, "grove": 7463, "loud": 7464, "advanced": 7465, "respec": 7466, "client": 7467, "supreme": 7468, "thailand": 7469, "how": 7470, "gig": 7471, "toi": 7472, "dot": 7473, "dollar": 7474, "ðŁijĩ": 7475, "pit": 7476, "rb": 7477, "hn": 7478, "produced": 7479, "ggers": 7480, "âĨē": 7481, "mlb": 7482, "canvas": 7483, "fineart": 7484, "usd": 7485, "inthe": 7486, "pson": 7487, "actual": 7488, "sl": 7489, "tb": 7490, "ipad": 7491, "ensure": 7492, "umb": 7493, "wd": 7494, "ska": 7495, "mars": 7496, "kend": 7497, "feli": 7498, "thing": 7499, "countdown": 7500, "absolute": 7501, "rout": 7502, "dral": 7503, "py": 7504, "injured": 7505, "mint": 7506, "hunting": 7507, "mmer": 7508, "sage": 7509, 
"ligh": 7510, "acity": 7511, "expan": 7512, "murray": 7513, "aro": 7514, "secure": 7515, "fourth": 7516, "eagle": 7517, "relief": 7518, "stakes": 7519, "industrial": 7520, "clark": 7521, "understanding": 7522, "seem": 7523, "plenty": 7524, "silver": 7525, "clau": 7526, "threat": 7527, "sail": 7528, "produce": 7529, "abstr": 7530, "isis": 7531, "br": 7532, "engers": 7533, "worry": 7534, "bieber": 7535, "sj": 7536, "justin": 7537, "realize": 7538, "kyle": 7539, "espn": 7540, "filter": 7541, "sch": 7542, "types": 7543, "gamedev": 7544, "ding": 7545, "twitter": 7546, "soldiers": 7547, "pom": 7548, "carbon": 7549, "yards": 7550, "childhood": 7551, "ried": 7552, "kel": 7553, "eleph": 7554, "tons": 7555, "keynote": 7556, "quiet": 7557, "wire": 7558, "posting": 7559, "issa": 7560, "representing": 7561, "backs": 7562, "alexander": 7563, "celebrates": 7564, "taining": 7565, "||": 7566, "chor": 7567, "escape": 7568, "peek": 7569, "tives": 7570, "field": 7571, "ssie": 7572, "impac": 7573, "sponsor": 7574, "rc": 7575, "wedd": 7576, "cannab": 7577, "sides": 7578, "tracks": 7579, "compar": 7580, "contrac": 7581, "technical": 7582, "bible": 7583, "exploring": 7584, "share": 7585, "trav": 7586, "nate": 7587, "illo": 7588, "scru": 7589, "mingham": 7590, "guns": 7591, "ofthe": 7592, "shame": 7593, "sees": 7594, "catho": 7595, "access": 7596, "cel": 7597, "reported": 7598, "»": 7599, "mario": 7600, "pad": 7601, "hopefully": 7602, "ouse": 7603, "yon": 7604, "disappo": 7605, "olo": 7606, "pitt": 7607, "pac": 7608, "gap": 7609, "crush": 7610, "sg": 7611, "kle": 7612, "gem": 7613, "empire": 7614, "dirty": 7615, "ais": 7616, "aviation": 7617, "zealand": 7618, "facing": 7619, "highway": 7620, "danny": 7621, "spider": 7622, "otta": 7623, "ðŁĺĦ": 7624, "wy": 7625, "colours": 7626, "infl": 7627, "costs": 7628, "olympics": 7629, "aus": 7630, "hm": 7631, "howard": 7632, "passes": 7633, "lauren": 7634, "mush": 7635, "opin": 7636, "rho": 7637, "discount": 7638, "operation": 7639, "emily": 7640, 
"mmm": 7641, "chamber": 7642, "dil": 7643, "toyo": 7644, "ship": 7645, "samu": 7646, "pictured": 7647, "unic": 7648, "pol": 7649, "keeper": 7650, "cartoon": 7651, "sten": 7652, "ignor": 7653, "nations": 7654, "nl": 7655, "tasting": 7656, "detail": 7657, "officials": 7658, "motor": 7659, "francis": 7660, "editor": 7661, "ðŁijĩ": 7662, "pets": 7663, "rangers": 7664, "tg": 7665, "rn": 7666, "wri": 7667, "nichol": 7668, "ise": 7669, "spots": 7670, "anie": 7671, "check": 7672, "triple": 7673, "kumar": 7674, "speakers": 7675, "icing": 7676, "prepared": 7677, "abuse": 7678, "friendship": 7679, "month": 7680, "swim": 7681, "aire": 7682, "scent": 7683, "hamilton": 7684, "indian": 7685, "jes": 7686, "yummy": 7687, "tears": 7688, "dawn": 7689, "ized": 7690, "worlds": 7691, "ðŁķ": 7692, "billi": 7693, "stone": 7694, "nhs": 7695, "basic": 7696, "por": 7697, "stle": 7698, "iron": 7699, "older": 7700, "clevel": 7701, "eing": 7702, "ðŁĺįðŁĺįðŁĺį": 7703, "prints": 7704, "firm": 7705, "aircraft": 7706, "finest": 7707, "develop": 7708, "aaron": 7709, "tz": 7710, "graham": 7711, "owners": 7712, "foli": 7713, "lesson": 7714, "ques": 7715, "babe": 7716, "craft": 7717, "phen": 7718, "jun": 7719, "birmingham": 7720, "vine": 7721, "ller": 7722, "ian": 7723, "fineartamerica": 7724, "evolu": 7725, "stab": 7726, "imper": 7727, "ward": 7728, "comic": 7729, "wiz": 7730, "invited": 7731, "duke": 7732, "match": 7733, "ports": 7734, "roger": 7735, "diagno": 7736, "kept": 7737, "test": 7738, "visu": 7739, "rhy": 7740, "soc": 7741, "tox": 7742, "baker": 7743, "surface": 7744, "covers": 7745, "mans": 7746, "bits": 7747, "xbox": 7748, "ffle": 7749, "nan": 7750, "gard": 7751, "hart": 7752, "waters": 7753, "villa": 7754, "retro": 7755, "lightning": 7756, "catholic": 7757, "democracy": 7758, "neighbor": 7759, "penn": 7760, "cran": 7761, "jonathan": 7762, "laura": 7763, "vibes": 7764, "sub": 7765, "coaching": 7766, "clearly": 7767, "ukraine": 7768, "brave": 7769, "commitment": 7770, "tall": 7771, "mart": 
7772, "rap": 7773, "modi": 7774, "scott": 7775, "bros": 7776, "shower": 7777, "ðŁı¾": 7778, "âĺºï¸ı": 7779, "cousin": 7780, "approach": 7781, "bre": 7782, "compos": 7783, "hilari": 7784, "philly": 7785, "gad": 7786, "quickly": 7787, "rian": 7788, "tm": 7789, "virtual": 7790, "houses": 7791, "kt": 7792, "phoenix": 7793, "wire": 7794, "ffy": 7795, "bunch": 7796, "ancing": 7797, "tale": 7798, "snapchat": 7799, "starter": 7800, "ht": 7801, "kicking": 7802, "apart": 7803, "thy": 7804, ")!": 7805, "blogger": 7806, "itz": 7807, "comfort": 7808, "angels": 7809, "wash": 7810, "\":": 7811, "argent": 7812, "request": 7813, "honest": 7814, "mighty": 7815, "bobby": 7816, "kg": 7817, "rol": 7818, "thouse": 7819, "expo": 7820, "hc": 7821, "tables": 7822, "magical": 7823, "posts": 7824, "dem": 7825, "nw": 7826, "orlando": 7827, "aber": 7828, "***": 7829, "ðŁĺľ": 7830, "environmental": 7831, "transformation": 7832, "mile": 7833, "wic": 7834, "hiring": 7835, "maine": 7836, "boar": 7837, "rying": 7838, "tis": 7839, "niture": 7840, "tweeted": 7841, "antonio": 7842, "opinion": 7843, "finale": 7844, "diy": 7845, "fis": 7846, "thin": 7847, "trouble": 7848, "lego": 7849, "files": 7850, "quart": 7851, "spa": 7852, "currency": 7853, "climate": 7854, "fanart": 7855, "railway": 7856, "space": 7857, "bands": 7858, "daniel": 7859, "motion": 7860, "leng": 7861, "holder": 7862, "occu": 7863, "marie": 7864, "cathedral": 7865, "buzz": 7866, "bies": 7867, "nascar": 7868, "bmw": 7869, "battery": 7870, "charlotte": 7871, "doctor": 7872, "zzle": 7873, "seven": 7874, "insan": 7875, "ddy": 7876, "sten": 7877, "labor": 7878, "thrilled": 7879, "seren": 7880, "documentary": 7881, "waves": 7882, "certain": 7883, "candid": 7884, "allowed": 7885, "nintendo": 7886, "starwars": 7887, "tap": 7888, "homemade": 7889, "dles": 7890, "thering": 7891, "bree": 7892, "empty": 7893, "piano": 7894, "positi": 7895, "country": 7896, "pork": 7897, "puts": 7898, "perry": 7899, "matic": 7900, "spotlight": 7901, "tist": 7902, 
"orities": 7903, "wealth": 7904, "cp": 7905, "barbar": 7906, "committed": 7907, "assau": 7908, "profit": 7909, "eight": 7910, "hul": 7911, "finishing": 7912, "runner": 7913, "sso": 7914, "inspec": 7915, "charged": 7916, "christop": 7917, "losing": 7918, "coal": 7919, "hoo": 7920, "elev": 7921, "dele": 7922, "moham": 7923, "donation": 7924, "cable": 7925, "clinic": 7926, "jin": 7927, "managed": 7928, "tering": 7929, "â¬": 7930, "urban": 7931, "deputy": 7932, "bber": 7933, "burn": 7934, "academic": 7935, "ott": 7936, "stake": 7937, "iter": 7938, "stown": 7939, "acker": 7940, "adventures": 7941, "adams": 7942, "greg": 7943, "prom": 7944, "vol": 7945, "acqu": 7946, "congre": 7947, "paint": 7948, "citizens": 7949, "call": 7950, "afford": 7951, "vc": 7952, "asks": 7953, "thetic": 7954, "independence": 7955, "âĽ": 7956, "hitting": 7957, "blon": 7958, "future": 7959, "âı": 7960, "inno": 7961, "gene": 7962, "boards": 7963, "distance": 7964, "set": 7965, "remem": 7966, "thal": 7967, "prevent": 7968, "lang": 7969, "objec": 7970, "susp": 7971, "matt": 7972, "induc": 7973, "boro": 7974, "pione": 7975, "redi": 7976, "virtu": 7977, "printed": 7978, "scope": 7979, "shark": 7980, "succe": 7981, "astron": 7982, "illegal": 7983, "jag": 7984, "cting": 7985, "inee": 7986, "ato": 7987, "robin": 7988, "nutrition": 7989, "bf": 7990, "dutch": 7991, "bn": 7992, "furniture": 7993, "forgotten": 7994, "atar": 7995, "rup": 7996, "hyper": 7997, "branch": 7998, "communication": 7999, "degrees": 8000, "onia": 8001, "uncle": 8002, "promote": 8003, "orche": 8004, "wii": 8005, "js": 8006, "button": 8007, "major": 8008, "cbs": 8009, "bristol": 8010, "premium": 8011, "ordinary": 8012, "edit": 8013, "mg": 8014, "weed": 8015, "steven": 8016, ":'": 8017, "gus": 8018, "tes": 8019, "captured": 8020, "drugs": 8021, "dow": 8022, "writes": 8023, "bishop": 8024, "wheels": 8025, "alization": 8026, "discovery": 8027, "wr": 8028, "rachel": 8029, "neil": 8030, "hydr": 8031, "cutest": 8032, "entrepreneur": 8033, 
"korean": 8034, "oregon": 8035, "ulty": 8036, "perfectly": 8037, "supported": 8038, "historical": 8039, "twins": 8040, "elly": 8041, "wel": 8042, "devil": 8043, "income": 8044, "scientists": 8045, "deleg": 8046, "hen": 8047, "oni": 8048, "iced": 8049, "gio": 8050, "curry": 8051, "reveal": 8052, "eg": 8053, "buffalo": 8054, "nol": 8055, "opera": 8056, "cameron": 8057, "hahahaha": 8058, "jab": 8059, "graduation": 8060, "craig": 8061, "ral": 8062, "if": 8063, "organization": 8064, "lege": 8065, "gang": 8066, "sud": 8067, "edinburgh": 8068, "lack": 8069, "flies": 8070, "gate": 8071, "thrones": 8072, "qb": 8073, "thereal": 8074, "eleg": 8075, "ppin": 8076, "cles": 8077, "jamie": 8078, "tnam": 8079, "crypto": 8080, "oul": 8081, "pages": 8082, "ase": 8083, "roots": 8084, "stupid": 8085, "adid": 8086, "boot": 8087, "protein": 8088, "sap": 8089, "sium": 8090, "sus": 8091, "endor": 8092, "function": 8093, "dont": 8094, "enna": 8095, "chy": 8096, "sque": 8097, "worker": 8098, "mtv": 8099, "ea": 8100, "kan": 8101, "ðŁēļ": 8102, "mus": 8103, "profession": 8104, "tto": 8105, "operations": 8106, "allo": 8107, "ctor": 8108, "invite": 8109, "scand": 8110, "outh": 8111, "zim": 8112, "links": 8113, "clients": 8114, "samsung": 8115, "discusses": 8116, "nell": 8117, "ultra": 8118, "somewhere": 8119, "stewart": 8120, "inet": 8121, "dez": 8122, "bout": 8123, "factor": 8124, "tian": 8125, "trans": 8126, "jeremy": 8127, "db": 8128, "ðŁĩ¬": 8129, "orn": 8130, "developing": 8131, "spol": 8132, "cooper": 8133, "mau": 8134, "remembering": 8135, "trek": 8136, "family": 8137, "seniors": 8138, "foster": 8139, "attended": 8140, "wing": 8141, "transform": 8142, "elementary": 8143, "horiz": 8144, "listing": 8145, "malaysia": 8146, "itch": 8147, "warrior": 8148, "philippines": 8149, "russell": 8150, "mend": 8151, "initiative": 8152, "creep": 8153, "tops": 8154, "briti": 8155, "aur": 8156, "sharp": 8157, "advertising": 8158, "ugly": 8159, "achiev": 8160, "materials": 8161, "bug": 8162, "device": 8163, 
"bonus": 8164, "facility": 8165, "cole": 8166, "nhl": 8167, "yas": 8168, "planned": 8169, "pole": 8170, "excellence": 8171, "trick": 8172, "confl": 8173, "rp": 8174, "achieve": 8175, "loan": 8176, "swag": 8177, "jessica": 8178, "howe": 8179, "pour": 8180, "scu": 8181, "zoo": 8182, "rated": 8183, "dresses": 8184, "rebel": 8185, "mexican": 8186, "coordin": 8187, "mess": 8188, "atlantic": 8189, "tl": 8190, "oscar": 8191, "walks": 8192, "pharmac": 8193, "investigation": 8194, "...#": 8195, "cci": 8196, "easily": 8197, "mondaymotivation": 8198, "yment": 8199, "auti": 8200, "forced": 8201, "armed": 8202, "colleagues": 8203, "papers": 8204, "proper": 8205, "shake": 8206, "buc": 8207, "lean": 8208, "exhibit": 8209, "evement": 8210, "cott": 8211, "biz": 8212, "sper": 8213, "kent": 8214, "swan": 8215, "/@": 8216, "girlfriend": 8217, "hawk": 8218, "âĺĢï¸ı": 8219, "mono": 8220, "ðŁēĽ": 8221, "statue": 8222, "ðŁĺ³": 8223, "ras": 8224, "teeth": 8225, "precious": 8226, "tile": 8227, "pam": 8228, "swift": 8229, "vali": 8230, "nose": 8231, "drunk": 8232, "experiences": 8233, "comeback": 8234, "genius": 8235, "worse": 8236, "shef": 8237, "rad": 8238, "edit": 8239, "honour": 8240, "auspol": 8241, "larry": 8242, "hire": 8243, "gordon": 8244, "achievement": 8245, "........": 8246, "suicide": 8247, "alternative": 8248, "sup": 8249, "surroun": 8250, "shake": 8251, "keith": 8252, "pepper": 8253, "turk": 8254, "criminal": 8255, "beck": 8256, "sum": 8257, "walls": 8258, "cnn": 8259, "antic": 8260, "offe": 8261, "colli": 8262, "wines": 8263, "highlight": 8264, "hawaii": 8265, "embar": 8266, "lfc": 8267, "ðŁĩ®": 8268, "mv": 8269, ">>": 8270, "atmo": 8271, "word": 8272, "carl": 8273, "shoutout": 8274, "brewing": 8275, "ìĿ": 8276, "dof": 8277, "sic": 8278, "hottest": 8279, "colon": 8280, "hhh": 8281, "shut": 8282, "lowing": 8283, "volume": 8284, "apartment": 8285, "agreement": 8286, "destro": 8287, "wee": 8288, "religious": 8289, "iowa": 8290, "rod": 8291, "landing": 8292, "represent": 8293, 
"ðŁĵ·:": 8294, "las": 8295, "usually": 8296, "hl": 8297, "cac": 8298, "salv": 8299, "along": 8300, "laughing": 8301, "beans": 8302, "reminds": 8303, "phase": 8304, "somebody": 8305, "mask": 8306, "ranked": 8307, "destroy": 8308, "sci": 8309, "â̼ï¸ı": 8310, "gabri": 8311, "leo": 8312, "roa": 8313, "failed": 8314, "sil": 8315, "refugees": 8316, "revi": 8317, "ring": 8318, "berries": 8319, "cookies": 8320, "yy": 8321, "conservation": 8322, "shab": 8323, "humans": 8324, "determin": 8325, "ain": 8326, "niall": 8327, "assu": 8328, "mba": 8329, "from": 8330, "extreme": 8331, "vices": 8332, "commerce": 8333, "ghtful": 8334, "ordered": 8335, "supports": 8336, "recap": 8337, "vor": 8338, "dropping": 8339, "correct": 8340, "paying": 8341, "meaning": 8342, "nj": 8343, "quiz": 8344, "\"#": 8345, "business": 8346, "ðŁĩ®ðŁĩ": 8347, "indigen": 8348, "dust": 8349, "boxes": 8350, "blind": 8351, "xxx": 8352, "zzy": 8353, "ðŁĩ¬ðŁĩ": 8354, "ssels": 8355, "sant": 8356, "ddle": 8357, "hilarious": 8358, "design": 8359, "wondering": 8360, "vehicles": 8361, "kre": 8362, "jud": 8363, "reception": 8364, "parker": 8365, "ÃŃ": 8366, "privi": 8367, "hydro": 8368, "softball": 8369, "pollu": 8370, "locked": 8371, "bah": 8372, "ear": 8373, "script": 8374, "divi": 8375, "brace": 8376, "george": 8377, "theast": 8378, "belo": 8379, "jal": 8380, "tionary": 8381, "dental": 8382, "rocket": 8383, "purch": 8384, "shak": 8385, "manufacturing": 8386, "ez": 8387, "itis": 8388, "concep": 8389, "tball": 8390, "chs": 8391, "directed": 8392, "prayers": 8393, "ook": 8394, "philos": 8395, "variety": 8396, "chess": 8397, "server": 8398, "gand": 8399, "balti": 8400, "ðŁĵ¸": 8401, "sely": 8402, "cruz": 8403, "spectacular": 8404, "burning": 8405, "represent": 8406, "iz": 8407, "tone": 8408, "merce": 8409, "hell": 8410, "bedroom": 8411, "establi": 8412, "bol": 8413, "common": 8414, "ãĄ»": 8415, "abor": 8416, "kitty": 8417, "heights": 8418, "repair": 8419, "william": 8420, "quake": 8421, "alabama": 8422, "population": 
8423, "rev": 8424, "rett": 8425, "ists": 8426, "nite": 8427, "lem": 8428, "aha": 8429, "cleveland": 8430, "rm": 8431, "pover": 8432, "obse": 8433, "montre": 8434, "mania": 8435, "®": 8436, "conne": 8437, "carni": 8438, "shah": 8439, "fy": 8440, "ua": 8441, "scor": 8442, "struggle": 8443, "bob": 8444, "''": 8445, "appropri": 8446, "decide": 8447, "ffed": 8448, "caster": 8449, "sort": 8450, "hungry": 8451, "drag": 8452, "Ć˜Ā§Ć™": 8453, "grounds": 8454, "dw": 8455, "slightly": 8456, "cardin": 8457, "deadline": 8458, "bronze": 8459, "webin": 8460, "barry": 8461, "silence": 8462, "euro": 8463, "option": 8464, "earn": 8465, "ðŁēĸ": 8466, "however": 8467, "naren": 8468, "nails": 8469, "bathroom": 8470, "vine": 8471, "phd": 8472, "mining": 8473, "garage": 8474, "()": 8475, "shoulder": 8476, "defeat": 8477, "dir": 8478, "ov": 8479, "liberty": 8480, "pleas": 8481, "xon": 8482, "compre": 8483, "av": 8484, "jin": 8485, "ables": 8486, "silent": 8487, "famili": 8488, "visits": 8489, "dipl": 8490, "habit": 8491, "millions": 8492, "regarding": 8493, "innovative": 8494, "senator": 8495, "rts": 8496, "von": 8497, "kl": 8498, "whil": 8499, "required": 8500, "âĿĦ": 8501, "luv": 8502, "presidential": 8503, "pocket": 8504, "hundre": 8505, "shown": 8506, "frozen": 8507, "toward": 8508, "fast": 8509, "confidence": 8510, "rough": 8511, "individual": 8512, "quet": 8513, "ðŁı½": 8514, "dome": 8515, "fifa": 8516, "engineer": 8517, "zen": 8518, "remix": 8519, "ðŁĺĄ": 8520, "plant": 8521, "minor": 8522, "robinson": 8523, "asy": 8524, "pulled": 8525, "certain": 8526, "potato": 8527, "(:": 8528, "pres": 8529, "occa": 8530, "wit": 8531, "item": 8532, "sie": 8533, "dating": 8534, "thompson": 8535, "owned": 8536, "anu": 8537, "vie": 8538, "tedly": 8539, "goodnight": 8540, "except": 8541, "ðŁĮŁ": 8542, "iraq": 8543, "kie": 8544, "rences": 8545, "lip": 8546, "similar": 8547, "saudi": 8548, "vig": 8549, "arthur": 8550, "picks": 8551, "milan": 8552, "honda": 8553, "maxi": 8554, "og": 8555, "stest": 
8556, "arch": 8557, "analytics": 8558, "basti": 8559, "pearl": 8560, "terry": 8561, "horse": 8562, "astro": 8563, "acce": 8564, "launching": 8565, "international": 8566, "sno": 8567, "tasty": 8568, "denver": 8569, "irl": 8570, "pete": 8571, "torn": 8572, "advantage": 8573, "varsity": 8574, "\"\"": 8575, "sole": 8576, "gc": 8577, "lang": 8578, "demonstr": 8579, "olds": 8580, "unity": 8581, "nets": 8582, "inspire": 8583, "crete": 8584, "nashville": 8585, "nelson": 8586, "eter": 8587, "walk": 8588, "hyun": 8589, "mack": 8590, "treas": 8591, "seeking": 8592, "rage": 8593, "brush": 8594, "aband": 8595, "whilst": 8596, "cocon": 8597, "hong": 8598, "shelter": 8599, "ip": 8600, "possibly": 8601, "soo": 8602, "ited": 8603, "âĦ": 8604, "races": 8605, "warming": 8606, "quin": 8607, "television": 8608, "matches": 8609, "rapi": 8610, "mental": 8611, "palm": 8612, "jennifer": 8613, "rolls": 8614, "indiana": 8615, "bars": 8616, "catching": 8617, "rescu": 8618, "candidates": 8619, "fare": 8620, "âłĢ": 8621, "seo": 8622, "vietnam": 8623, "alpha": 8624, "michelle": 8625, "visible": 8626, "regre": 8627, "wned": 8628, "apple": 8629, "lip": 8630, "ffe": 8631, "liz": 8632, "yorkshire": 8633, "hail": 8634, "seasons": 8635, "began": 8636, "md": 8637, "kc": 8638, "lap": 8639, "fascinating": 8640, "help": 8641, "ury": 8642, "ums": 8643, "nuts": 8644, "sem": 8645, "alongside": 8646, "bridge": 8647, "orial": 8648, "ove": 8649, "worldcup": 8650, "british": 8651, "comfortable": 8652, "ive": 8653, "hotels": 8654, "fairs": 8655, "horri": 8656, "sox": 8657, "dining": 8658, "stream": 8659, "barri": 8660, "ssy": 8661, "wim": 8662, "terms": 8663, "vu": 8664, "pere": 8665, "lens": 8666, "walked": 8667, "ror": 8668, "lars": 8669, "shield": 8670, "doubt": 8671, "proto": 8672, "crossing": 8673, "meant": 8674, "medium": 8675, "adding": 8676, "eb": 8677, "cheap": 8678, "func": 8679, "paper": 8680, "brands": 8681, "ryan": 8682, "feedback": 8683, "collins": 8684, "unknown": 8685, "tropical": 8686, 
"sandwich": 8687, "fallen": 8688, "formu": 8689, "select": 8690, "loads": 8691, "answers": 8692, "ori": 8693, "maga": 8694, "dor": 8695, "duo": 8696, "alie": 8697, "drum": 8698, "uri": 8699, "deer": 8700, "soul": 8701, "shut": 8702, "âĺº": 8703, "stolen": 8704, "donated": 8705, "buzz": 8706, "patriots": 8707, "hal": 8708, "nasty": 8709, "nominated": 8710, "monte": 8711, "kia": 8712, "thri": 8713, "ingu": 8714, "tests": 8715, "petro": 8716, "ðŁijij": 8717, "hosts": 8718, "nest": 8719, "topic": 8720, "patch": 8721, "mmy": 8722, "hugh": 8723, "abilities": 8724, "mathe": 8725, "smiles": 8726, "gb": 8727, "agenda": 8728, "insights": 8729, "chip": 8730, "phan": 8731, "failure": 8732, "dgers": 8733, "hai": 8734, "significant": 8735, "shock": 8736, "rural": 8737, "glam": 8738, "figures": 8739, "potus": 8740, "ota": 8741, "ministry": 8742, "appears": 8743, "fear": 8744, "rh": 8745, "american": 8746, "hatt": 8747, "sony": 8748, "fires": 8749, "edi": 8750, "nou": 8751, "equi": 8752, "when": 8753, "universal": 8754, "madness": 8755, "ix": 8756, "sculpture": 8757, "bach": 8758, "tto": 8759, "sweden": 8760, "eta": 8761, "ento": 8762, "developed": 8763, "monthly": 8764, "maps": 8765, "rah": 8766, "led": 8767, "delta": 8768, "saints": 8769, "islam": 8770, "bench": 8771, "fifth": 8772, "vard": 8773, "socks": 8774, "welcoming": 8775, "je": 8776, "turner": 8777, "vb": 8778, "adi": 8779, "norway": 8780, "ady": 8781, "hurricane": 8782, "porsche": 8783, "tradition": 8784, "exam": 8785, "newspaper": 8786, "luci": 8787, "aver": 8788, "ideal": 8789, "dna": 8790, "madison": 8791, "ð٧": 8792, "witness": 8793, "acou": 8794, "insight": 8795, "simon": 8796, "robot": 8797, "snake": 8798, "nbc": 8799, "aco": 8800, "ross": 8801, "shment": 8802, "religion": 8803, "chann": 8804, "insu": 8805, "campbell": 8806, "installed": 8807, "weather": 8808, "horses": 8809, "oli": 8810, "robert": 8811, "kaz": 8812, "ðŁıĢ": 8813, "veteran": 8814, "thread": 8815, "quarter": 8816, "easier": 8817, "capture": 8818, 
"hipho": 8819, "lawrence": 8820, "romantic": 8821, "passion": 8822, "clay": 8823, "oxford": 8824, "thai": 8825, "studying": 8826, "fia": 8827, "elected": 8828, "mostly": 8829, "cb": 8830, "tumb": 8831, "âĢįâĻĤ": 8832, "xl": 8833, "shan": 8834, "faster": 8835, "evans": 8836, "slide": 8837, "shri": 8838, "seek": 8839, "mies": 8840, "chemistry": 8841, "pumpkin": 8842, "tum": 8843, ",,": 8844, "room": 8845, "fired": 8846, "lips": 8847, "presence": 8848, "aff": 8849, "brewery": 8850, "arrive": 8851, "swag": 8852, "photograph": 8853, "pengu": 8854, "chips": 8855, "attor": 8856, "values": 8857, "accurate": 8858, "contemporary": 8859, "principal": 8860, "cannabis": 8861, "ario": 8862, "anywhere": 8863, "gia": 8864, "democrats": 8865, "buildings": 8866, "lived": 8867, "aps": 8868, "negative": 8869, "mare": 8870, "ballo": 8871, "lion": 8872, "diamon": 8873, "look": 8874, "reform": 8875, "tommy": 8876, "illa": 8877, "treats": 8878, "hundreds": 8879, "portland": 8880, "worthy": 8881, "excep": 8882, "aria": 8883, "idol": 8884, "beer": 8885, "cdn": 8886, "yu": 8887, "awk": 8888, "ðŁĩ¨": 8889, "cells": 8890, "ó": 8891, "identity": 8892, "drawn": 8893, "devil": 8894, "finger": 8895, "tham": 8896, "ðŁijĬ": 8897, "earned": 8898, "fintech": 8899, "dolph": 8900, "tweeting": 8901, "evolution": 8902, "ðŁĵį": 8903, "estim": 8904, "mvp": 8905, "none": 8906, "ðŁĩºðŁĩ¸": 8907, "toyota": 8908, "aux": 8909, "marin": 8910, "bold": 8911, "lbs": 8912, "steak": 8913, "murphy": 8914, "itable": 8915, "louis": 8916, "solve": 8917, "pia": 8918, "skir": 8919, "illino": 8920, "webinar": 8921, "banana": 8922, "lov": 8923, "thon": 8924, "voters": 8925, "affordable": 8926, "defeated": 8927, "lmfa": 8928, "airlines": 8929, "superb": 8930, "anyway": 8931, "debt": 8932, "bored": 8933, "versi": 8934, "metal": 8935, "responsible": 8936, "mk": 8937, "sse": 8938, "fay": 8939, "caused": 8940, "fp": 8941, "recommend": 8942, "plaza": 8943, "sporting": 8944, "alliance": 8945, "austri": 8946, "nn": 8947, "tours": 
8948, "surprised": 8949, "artif": 8950, "thunder": 8951, "surve": 8952, "wore": 8953, "brief": 8954, "necessary": 8955, "zie": 8956, "ashley": 8957, "drake": 8958, "rt": 8959, "knife": 8960, "immun": 8961, "charges": 8962, "athe": 8963, "bride": 8964, "reply": 8965, "gav": 8966, "broadcast": 8967, "puer": 8968, "bracelet": 8969, "capacity": 8970, "harvest": 8971, "idk": 8972, "performan": 8973, "dding": 8974, "ilers": 8975, "para": 8976, "jama": 8977, "province": 8978, "chin": 8979, "iders": 8980, "hari": 8981, "teaser": 8982, "chen": 8983, "restor": 8984, "rat": 8985, "flat": 8986, "colom": 8987, "ðŁēŀ": 8988, "ðŁĩ¨ðŁĩ": 8989, "smooth": 8990, "rt": 8991, "pitch": 8992, "staying": 8993, "israeli": 8994, "tcot": 8995, "perspective": 8996, "dock": 8997, "opener": 8998, "lovel": 8999, "xo": 9000, "classroom": 9001, "lington": 9002, "goal": 9003, "kennedy": 9004, "sham": 9005, "spaces": 9006, "mitchell": 9007, "homecoming": 9008, "uki": 9009, "claimed": 9010, "recruit": 9011, "ingo": 9012, "mufc": 9013, "monit": 9014, "groo": 9015, "resident": 9016, "percent": 9017, "perman": 9018, "ottawa": 9019, "intment": 9020, "anxi": 9021, "standards": 9022, "worship": 9023, "scheme": 9024, "fx": 9025, "potter": 9026, "bian": 9027, "athletic": 9028, "afgh": 9029, "sse": 9030, "satell": 9031, "parties": 9032, "âĿ¤âĿ¤": 9033, "infrastructure": 9034, "relax": 9035, "modu": 9036, "worn": 9037, "smoking": 9038, "yach": 9039, "practices": 9040, "wcw": 9041, "amb": 9042, "domestic": 9043, "taylor": 9044, "kentu": 9045, "provided": 9046, "modi": 9047, "veg": 9048, "\"...": 9049, "observ": 9050, "ðŁĺ©": 9051, "beard": 9052, "mour": 9053, "angry": 9054, "ðŁĺ±": 9055, "startups": 9056, "wooden": 9057, "dive": 9058, "nail": 9059, "antique": 9060, "roses": 9061, "tornado": 9062, "mat": 9063, "^^": 9064, "suspect": 9065, "farm": 9066, "devices": 9067, "mega": 9068, "tul": 9069, "scholarship": 9070, "gee": 9071, "disaster": 9072, "arrival": 9073, "poin": 9074, "marc": 9075, "katie": 9076, 
"bbed": 9077, "false": 9078, "deserves": 9079, "richard": 9080, "juana": 9081, "frey": 9082, "tioned": 9083, "hybri": 9084, "rw": 9085, "sarah": 9086, "achi": 9087, "cure": 9088, "ole": 9089, "morris": 9090, "chic": 9091, "broadway": 9092, "label": 9093, "pak": 9094, "poverty": 9095, "golf": 9096, "ered": 9097, "fu": 9098, "eries": 9099, "bees": 9100, "alogue": 9101, "stel": 9102, "wireless": 9103, "jewish": 9104, "tide": 9105, "blocked": 9106, "lifetime": 9107, "bhar": 9108, "split": 9109, "amster": 9110, "thi": 9111, "joshu": 9112, "brunch": 9113, "haps": 9114, "sfor": 9115, "oops": 9116, "kapoor": 9117, "hiking": 9118, "supposed": 9119, "roof": 9120, "reas": 9121, "train": 9122, "tight": 9123, "trump": 9124, "basically": 9125, "rr": 9126, "eared": 9127, "seeds": 9128, "entrance": 9129, "cp": 9130, "wie": 9131, "sonic": 9132, "victim": 9133, "here": 9134, "eh": 9135, "earrings": 9136, "salmon": 9137, "arctic": 9138, "anne": 9139, "dougla": 9140, "corruption": 9141, "hannah": 9142, "hasn": 9143, "voices": 9144, "conce": 9145, "atta": 9146, "fleet": 9147, "clinical": 9148, "democratic": 9149, "tony": 9150, "stood": 9151, "lef": 9152, "twitch": 9153, "ail": 9154, "honestly": 9155, "increased": 9156, "drome": 9157, "donna": 9158, "accepted": 9159, "visitors": 9160, "apar": 9161, "ador": 9162, "par": 9163, "jerry": 9164, "rai": 9165, "brandon": 9166, "abu": 9167, "!!!!!!": 9168, "meme": 9169, "ingh": 9170, "glorious": 9171, "bhu": 9172, "pump": 9173, "jol": 9174, "like": 9175, "fisher": 9176, "maz": 9177, "agan": 9178, "destination": 9179, "playlist": 9180, "letters": 9181, "genu": 9182, "brace": 9183, "celebrated": 9184, "banner": 9185, "rhe": 9186, "dragon": 9187, "ðŁĺħ": 9188, "signature": 9189, "grey": 9190, "âľĶï¸ı": 9191, "alice": 9192, "bered": 9193, "pher": 9194, "bern": 9195, "cath": 9196, "gathering": 9197, "scoring": 9198, "influence": 9199, "smiling": 9200, "dept": 9201, "local": 9202, "ax": 9203, "acu": 9204, "retirement": 9205, "honor": 9206, "herself": 
9207, "chemical": 9208, "assess": 9209, "yall": 9210, "frequ": 9211, "appreciation": 9212, "aca": 9213, "choir": 9214, "cuz": 9215, "soil": 9216, "cil": 9217, "reporting": 9218, "uh": 9219, "enterprise": 9220, "grat": 9221, "jacob": 9222, "rum": 9223, "fee": 9224, "jak": 9225, "spin": 9226, "bikes": 9227, "phia": 9228, "stere": 9229, "pis": 9230, "blood": 9231, "tatt": 9232, "raft": 9233, "warren": 9234, "sheri": 9235, "backstage": 9236, "marsh": 9237, "hashtag": 9238, "therine": 9239, "rein": 9240, "gameday": 9241, "guaran": 9242, "recipes": 9243, "minds": 9244, "stronger": 9245, "issued": 9246, "bicy": 9247, "nak": 9248, "mented": 9249, "scary": 9250, "ux": 9251, "previous": 9252, "ttle": 9253, "thats": 9254, "actors": 9255, "uma": 9256, "tina": 9257, "bunny": 9258, "promotion": 9259, "uss": 9260, "oliver": 9261, "montreal": 9262, "whats": 9263, "appreciated": 9264, "lakes": 9265, "excuse": 9266, "knowing": 9267, "prizes": 9268, "muscle": 9269, "shades": 9270, "scot": 9271, "ingredi": 9272, "electronic": 9273, "juan": 9274, "combat": 9275, "sri": 9276, "eh": 9277, "turkish": 9278, "lom": 9279, "strikes": 9280, "prison": 9281, "ree": 9282, "pope": 9283, "vid": 9284, "oldest": 9285, "doll": 9286, "swiss": 9287, "certified": 9288, "clip": 9289, "returning": 9290, "lator": 9291, "leigh": 9292, "ttes": 9293, "watson": 9294, "healing": 9295, "elim": 9296, "perhaps": 9297, "hass": 9298, "kau": 9299, "dder": 9300, "mouse": 9301, "newcastle": 9302, "indigenous": 9303, "welcomes": 9304, "cole": 9305, "taught": 9306, "noise": 9307, "appear": 9308, "joe": 9309, "canon": 9310, "wednesday": 9311, "utah": 9312, "ctive": 9313, "driven": 9314, "iv": 9315, "cell": 9316, "strip": 9317, "acc": 9318, "focused": 9319, "arrest": 9320, "stocks": 9321, "woo": 9322, "âĹ": 9323, "noticed": 9324, "shado": 9325, "displa": 9326, "terror": 9327, "borne": 9328, "second": 9329, "queens": 9330, "woke": 9331, "jail": 9332, "nott": 9333, "cambridge": 9334, "hart": 9335, "seaf": 9336, "fax": 9337, 
"accept": 9338, "âĺħ": 9339, "goods": 9340, "kat": 9341, "twin": 9342, "hs": 9343, "thousand": 9344, "sins": 9345, "suite": 9346, "ampton": 9347, "arn": 9348, "relev": 9349, "richar": 9350, "hoops": 9351, "nbc": 9352, "classic": 9353, "pab": 9354, "soldier": 9355, "deplo": 9356, "leans": 9357, "installation": 9358, "clash": 9359, "leban": 9360, "eee": 9361, "tire": 9362, "beloved": 9363, "fusion": 9364, "traveling": 9365, "nei": 9366, "cookie": 9367, "globe": 9368, "physics": 9369, "sq": 9370, "col": 9371, "wolves": 9372, "dl": 9373, "exit": 9374, "\"-": 9375, "football": 9376, "leaf": 9377, "sterling": 9378, "hide": 9379, "minneso": 9380, "freshman": 9381, "nature": 9382, "indie": 9383, "supplies": 9384, "bris": 9385, "irish": 9386, "inktober": 9387, "doodle": 9388, "icop": 9389, "messages": 9390, "adults": 9391, "recorded": 9392, "fixed": 9393, "ardo": 9394, "offered": 9395, "underground": 9396, "drone": 9397, "pine": 9398, "mainten": 9399, "andre": 9400, "hammer": 9401, "sx": 9402, "round": 9403, "hike": 9404, "brad": 9405, "rome": 9406, "full": 9407, "oney": 9408, "rows": 9409, "columbia": 9410, "archives": 9411, "approved": 9412, "batch": 9413, "illinois": 9414, "recognition": 9415, "shouldn": 9416, "fog": 9417, "ncaa": 9418, "kevin": 9419, "humanity": 9420, "although": 9421, "powers": 9422, "pou": 9423, "sar": 9424, "pest": 9425, "alcohol": 9426, "consci": 9427, "philadel": 9428, "eno": 9429, "tm": 9430, "okla": 9431, "category": 9432, "participate": 9433, "accused": 9434, "brief": 9435, "poem": 9436, "clubs": 9437, "consult": 9438, "jab": 9439, "bigdata": 9440, "amsterdam": 9441, "acing": 9442, "certific": 9443, "nu": 9444, "dat": 9445, "improved": 9446, "andy": 9447, "campaig": 9448, "palestin": 9449, "pace": 9450, "mobi": 9451, "feelings": 9452, "wolf": 9453, "brain": 9454, "propos": 9455, "interactive": 9456, "prince": 9457, "index": 9458, "cis": 9459, "chae": 9460, "peaceful": 9461, "covering": 9462, "aco": 9463, "courses": 9464, "monkey": 9465, 
"replace": 9466, "bl": 9467, "bloody": 9468, "tales": 9469, "brighton": 9470, "neighborhood": 9471, "gates": 9472, "spiritual": 9473, "afraid": 9474, "breast": 9475, "bones": 9476, "ðŁijī": 9477, "video": 9478, "wau": 9479, "touch": 9480, "injuries": 9481, "carl": 9482, "rix": 9483, "unex": 9484, "âĢ¢": 9485, "fred": 9486, "considered": 9487, "thusi": 9488, "anch": 9489, "ony": 9490, "usa": 9491, "graphics": 9492, "acre": 9493, "ðŁĺ©": 9494, "commemor": 9495, "commod": 9496, "goti": 9497, "guardian": 9498, "starbucks": 9499, "prevention": 9500, "hahahaha": 9501, "administration": 9502, "portugal": 9503, "faculty": 9504, "beta": 9505, "ula": 9506, "albert": 9507, "breath": 9508, "eri": 9509, "letting": 9510, "tric": 9511, "mentation": 9512, "incredibly": 9513, "tennes": 9514, "vd": 9515, "ðŁĻĪ": 9516, "eddie": 9517, "brick": 9518, "grill": 9519, "btw": 9520, "watches": 9521, "researchers": 9522, "tney": 9523, "nie": 9524, "pas": 9525, "aster": 9526, "vibr": 9527, "pokemon": 9528, "chrome": 9529, "goat": 9530, "pitts": 9531, "illy": 9532, "festive": 9533, "yd": 9534, "canal": 9535, "ðŁĨ": 9536, "fies": 9537, "carlos": 9538, "reque": 9539, "partici": 9540, "trains": 9541, "sample": 9542, "temperature": 9543, "symph": 9544, "picking": 9545, "indoor": 9546, "zers": 9547, "playoffs": 9548, "________": 9549, "apes": 9550, "lyrics": 9551, "islamic": 9552, "performances": 9553, "dick": 9554, "spark": 9555, "seas": 9556, "homa": 9557, "ground": 9558, "disci": 9559, "employee": 9560, "commu": 9561, "alaska": 9562, "alan": 9563, "feast": 9564, "dging": 9565, "banking": 9566, "manuel": 9567, "slowly": 9568, "trucks": 9569, "mccar": 9570, "ooo": 9571, "scrat": 9572, "orchestra": 9573, "individu": 9574, "mx": 9575, "breath": 9576, "stairs": 9577, "equality": 9578, "blake": 9579, "locations": 9580, "coconut": 9581, "baltimore": 9582, "aaa": 9583, "lc": 9584, "ðŁıĨ": 9585, "harvey": 9586, "resist": 9587, "immigration": 9588, "adidas": 9589, "fili": 9590, "ref": 9591, "lgbt": 9592, 
"mos": 9593, "ppi": 9594, "kenny": 9595, "terror": 9596, "bane": 9597, "apolis": 9598, "sg": 9599, "socialmedia": 9600, "kai": 9601, "honest": 9602, "assas": 9603, "bollywood": 9604, "âĢįâĻĢï¸ı": 9605, "ferrari": 9606, "horn": 9607, "crypto": 9608, "boom": 9609, "maintenance": 9610, "idi": 9611, "sman": 9612, "wl": 9613, "extended": 9614, "insul": 9615, "ves": 9616, "gosp": 9617, "tri": 9618, "pig": 9619, "targe": 9620, "celer": 9621, "stati": 9622, "smh": 9623, "ridic": 9624, "appeal": 9625, "?)": 9626, "conclu": 9627, "cosme": 9628, "sheep": 9629, "christopher": 9630, "enthusi": 9631, "polish": 9632, "mets": 9633, "ounded": 9634, "sustainability": 9635, "creativity": 9636, "concrete": 9637, "rai": 9638, "alien": 9639, "bless": 9640, "tees": 9641, "club": 9642, "rot": 9643, "bos": 9644, "exist": 9645, "perfection": 9646, "luck": 9647, "rocky": 9648, "expensive": 9649, "meanwhile": 9650, "happybirthday": 9651, "pret": 9652, "thriller": 9653, "cave": 9654, "playoff": 9655, "somer": 9656, "lu": 9657, "lex": 9658, "defence": 9659, "amwriting": 9660, "homeless": 9661, "prophe": 9662, "chet": 9663, "pastor": 9664, "ðŁ¤£": 9665, "lander": 9666, "www": 9667, "Ģï¸ı": 9668, "tica": 9669, "!#": 9670, "otic": 9671, "radar": 9672, "posters": 9673, "powder": 9674, "poli": 9675, "haun": 9676, "trap": 9677, "blin": 9678, "assault": 9679, "shorts": 9680, "rey": 9681, "shy": 9682, "squir": 9683, "racist": 9684, "garlic": 9685, "fur": 9686, "remote": 9687, "smell": 9688, "impressed": 9689, "fingers": 9690, "âłĢ": 9691, "dino": 9692, "lement": 9693, "snu": 9694, "promoting": 9695, "string": 9696, "productive": 9697, "bage": 9698, "mason": 9699, "raz": 9700, "directly": 9701, "jk": 9702, "eval": 9703, "ðŁijĬ": 9704, "doctors": 9705, "cow": 9706, "rider": 9707, "stv": 9708, "remove": 9709, "wu": 9710, "nathan": 9711, "rod": 9712, "nr": 9713, "=>": 9714, "affected": 9715, "invest": 9716, "mption": 9717, "ginger": 9718, "od": 9719, "agriculture": 9720, "sque": 9721, "mug": 9722, 
"counting": 9723, "kee": 9724, "magnific": 9725, "cook": 9726, "anistan": 9727, "root": 9728, "placed": 9729, "sympo": 9730, "ghana": 9731, "und": 9732, "cheer": 9733, "throwing": 9734, "secrets": 9735, "filling": 9736, "optimi": 9737, "butterfly": 9738, "bubb": 9739, "ðŁĺī": 9740, "terrible": 9741, "dg": 9742, "silk": 9743, "obsessed": 9744, "lou": 9745, "aide": 9746, "salute": 9747, "monu": 9748, "philadelphia": 9749, "scientific": 9750, "ist": 9751, "uae": 9752, "dessert": 9753, "bottles": 9754, "canyon": 9755, "ðŁĺĪ": 9756, "carib": 9757, "other": 9758, "wich": 9759, "resource": 9760, "guilty": 9761, "und": 9762, "leon": 9763, "ess": 9764, "kane": 9765, "ele": 9766, "trainer": 9767, "heim": 9768, "ante": 9769, "manage": 9770, "rookie": 9771, "treated": 9772, "poses": 9773, "rsvp": 9774, "causes": 9775, "awak": 9776, "jewell": 9777, "lett": 9778, "onics": 9779, "titles": 9780, "cardiff": 9781, "gaga": 9782, "bump": 9783, "useful": 9784, "?!": 9785, "loose": 9786, "bbing": 9787, "::": 9788, "argentina": 9789, "debu": 9790, "cycl": 9791, "whel": 9792, "disgu": 9793, "jel": 9794, "kills": 9795, "biology": 9796, "exter": 9797, "trash": 9798, "bodies": 9799, "tram": 9800, "circuit": 9801, "expect": 9802, "lads": 9803, "wells": 9804, "shot": 9805, "gee": 9806, "narendr": 9807, "fastest": 9808, "bent": 9809, "bills": 9810, "marshall": 9811, "hats": 9812, "introduce": 9813, "citizen": 9814, "impossible": 9815, "gib": 9816, "azz": 9817, "networking": 9818, "rant": 9819, "think": 9820, "indy": 9821, "stops": 9822, "ftheday": 9823, "brian": 9824, "**": 9825, "amodi": 9826, "dome": 9827, "courage": 9828, "packing": 9829, "affairs": 9830, "gn": 9831, "sized": 9832, "entary": 9833, "poland": 9834, "switzer": 9835, "afghanistan": 9836, "wu": 9837, "tender": 9838, "subscribe": 9839, "mosco": 9840, "attend": 9841, "republican": 9842, "honey": 9843, "âĢĭ": 9844, "simul": 9845, "wester": 9846, "foodie": 9847, "oro": 9848, "middle": 9849, "abt": 9850, "copies": 9851, "maje": 9852, 
"narendramodi": 9853, "typical": 9854, "inspirational": 9855, "vitam": 9856, "wiscon": 9857, "cubs": 9858, "tivity": 9859, "hali": 9860, "ears": 9861, "kay": 9862, "dare": 9863, "marijuana": 9864, "curious": 9865, "ania": 9866, "tomato": 9867, "remind": 9868, "ðŁĩ·": 9869, "scared": 9870, "coup": 9871, "poet": 9872, "landed": 9873, "rid": 9874, "wrapped": 9875, "morri": 9876, "climbing": 9877, "ews": 9878, "feeding": 9879, "contra": 9880, "thology": 9881, "grid": 9882, "tively": 9883, "reader": 9884, "laser": 9885, "diving": 9886, "dig": 9887, "latin": 9888, "tied": 9889, "shakespe": 9890, "oci": 9891, "adm": 9892, "showers": 9893, "chuck": 9894, "marcus": 9895, "oos": 9896, "knee": 9897, "olive": 9898, "owl": 9899, "dylan": 9900, "anno": 9901, "gym": 9902, "decisions": 9903, "wellness": 9904, "arrives": 9905, "satis": 9906, "chris": 9907, "thurs": 9908, "ðŁ¤£": 9909, "interviews": 9910, "thankyou": 9911, "switzerland": 9912, "overnight": 9913, "journalist": 9914, "serves": 9915, "volcan": 9916, ".......": 9917, "plot": 9918, "nicol": 9919, "carrying": 9920, "magne": 9921, "treasure": 9922, "exp": 9923, "bever": 9924, "ðŁĺ¢": 9925, "marty": 9926, "mole": 9927, "donations": 9928, "recognized": 9929, "bh": 9930, "dus": 9931, "shann": 9932, "aldo": 9933, "successfully": 9934, "ente": 9935, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 9936, "cabinet": 9937, "cuis": 9938, "titled": 9939, "das": 9940, "sol": 9941, "strategies": 9942, "delivering": 9943, "adds": 9944, "anian": 9945, "nether": 9946, "ðŁēĄ": 9947, "contain": 9948, "suits": 9949, "pairs": 9950, "todd": 9951, "rella": 9952, "rope": 9953, "cio": 9954, "crop": 9955, "paintings": 9956, "suz": 9957, "rejec": 9958, "bust": 9959, "dh": 9960, "fraud": 9961, "mh": 9962, "control": 9963, "jeal": 9964, "destroyed": 9965, "allows": 9966, "wool": 9967, "minnesota": 9968, "omen": 9969, "ju": 9970, "symposium": 9971, "daf": 9972, "limit": 9973, "accounts": 9974, "loading": 9975, "intern": 9976, "resolution": 9977, "holland": 9978, "qual": 9979, 
"meetings": 9980, "grave": 9981, "camping": 9982, "vam": 9983, "renov": 9984, "liberal": 9985, "amber": 9986, "gree": 9987, "humb": 9988, "fever": 9989, "eling": 9990, "brooks": 9991, "à²": 9992, "beth": 9993, "aded": 9994, "alt": 9995, "roe": 9996, "performed": 9997, "josh": 9998, "franklin": 9999, "nicole": 10000, "dess": 10001, "bbs": 10002, "mg": 10003, "networks": 10004, "minim": 10005, "alt": 10006, "weapons": 10007, "guy": 10008, "jason": 10009, "gha": 10010, "harbour": 10011, "aton": 10012, "praise": 10013, "kentucky": 10014, "belfast": 10015, "sticks": 10016, "bloss": 10017, "hopes": 10018, "anthro": 10019, "familiar": 10020, "wait": 10021, "chile": 10022, "depression": 10023, "lax": 10024, "jets": 10025, "leice": 10026, "receives": 10027, "sier": 10028, "ank": 10029, "dex": 10030, "indeed": 10031, "flexi": 10032, "fabric": 10033, "lamb": 10034, "helicop": 10035, "amanda": 10036, "âĢĶâĢĶ": 10037, "compete": 10038, "snack": 10039, "technologies": 10040, "syrian": 10041, "moms": 10042, "muham": 10043, "chosen": 10044, "anat": 10045, "devon": 10046, "sharks": 10047, "ret": 10048, "fundraiser": 10049, "selfies": 10050, "stations": 10051, "communications": 10052, "tennessee": 10053, "tutor": 10054, "rot": 10055, "valuable": 10056, "dynamic": 10057, "nurse": 10058, "ied": 10059, "earthquake": 10060, "deserved": 10061, "ave": 10062, "sara": 10063, "stretch": 10064, "douglas": 10065, "nepal": 10066, "ç": 10067, "obviously": 10068, "dame": 10069, "rape": 10070, "anybody": 10071, "kw": 10072, "patrol": 10073, "holders": 10074, "hanna": 10075, "infographic": 10076, "eco": 10077, "beating": 10078, "stanley": 10079, "boats": 10080, "ribb": 10081, "ez": 10082, "witch": 10083, "inva": 10084, "acid": 10085, "boarding": 10086, "-@": 10087, "gil": 10088, "dave": 10089, "careers": 10090, "oppos": 10091, "lloy": 10092, "inter": 10093, "dope": 10094, "resu": 10095, "jagu": 10096, "shade": 10097, "indy": 10098, "onist": 10099, "relations": 10100, "agen": 10101, "able": 10102, 
"incident": 10103, "meter": 10104, "sharma": 10105, "idr": 10106, "prove": 10107, "immediately": 10108, "troops": 10109, "aman": 10110, "glow": 10111, "gaza": 10112, "blocks": 10113, "personal": 10114, "chronic": 10115, "aller": 10116, "sid": 10117, "shr": 10118, "whatsapp": 10119, "lucy": 10120, "archae": 10121, "hou": 10122, "journalism": 10123, "ourselves": 10124, "got": 10125, "themed": 10126, "shaped": 10127, "weak": 10128, "casual": 10129, "length": 10130, "slam": 10131, "abbey": 10132, "ev": 10133, "counter": 10134, "esta": 10135, "recipi": 10136, "chapel": 10137, "expansion": 10138, "self": 10139, "suffering": 10140, "spice": 10141, "nz": 10142, "spart": 10143, "desper": 10144, "booking": 10145, "quarters": 10146, "yon": 10147, "ðŁēĹ": 10148, "pk": 10149, "continued": 10150, "-#": 10151, "manhatt": 10152, "talked": 10153, "shen": 10154, "combo": 10155, "hybrid": 10156, "jeans": 10157, "liquid": 10158, "seal": 10159, "retweets": 10160, "acceler": 10161, "collective": 10162, "tas": 10163, ":))": 10164, "professionals": 10165, "raw": 10166, "ott": 10167, "susan": 10168, "iring": 10169, "oklahoma": 10170, "reven": 10171, "survival": 10172, "creator": 10173, "transit": 10174, "stac": 10175, "surf": 10176, "ik": 10177, "editing": 10178, "chilling": 10179, "bailey": 10180, "steal": 10181, "rable": 10182, "parent": 10183, "hunger": 10184, "snapp": 10185, "collect": 10186, "philosoph": 10187, "dedication": 10188, "cf": 10189, "cm": 10190, "leep": 10191, "repeat": 10192, "reha": 10193, "unfortun": 10194, "aer": 10195, "aero": 10196, "abstract": 10197, "monitor": 10198, "agents": 10199, "bul": 10200, "science": 10201, "harbor": 10202, "dragons": 10203, "flooding": 10204, "accompli": 10205, "dash": 10206, "julia": 10207, "thered": 10208, "tuesday": 10209, "cyber": 10210, "blow": 10211, "tained": 10212, "lem": 10213, "reference": 10214, "ppo": 10215, "negoti": 10216, "charle": 10217, "connor": 10218, "ault": 10219, "accessories": 10220, "commissioner": 10221, "rainy": 
10222, "rear": 10223, "advisory": 10224, "lucas": 10225, "maid": 10226, "coal": 10227, "kav": 10228, "polo": 10229, "ðŁı¾": 10230, "transport": 10231, "margare": 10232, "strawberry": 10233, "burns": 10234, "greens": 10235, "nev": 10236, "participants": 10237, "colin": 10238, "belgium": 10239, "colour": 10240, "inform": 10241, "dell": 10242, "bron": 10243, "caly": 10244, "kickoff": 10245, "strategic": 10246, "reunion": 10247, "honors": 10248, "lib": 10249, "egyp": 10250, "âŃIJï¸ı": 10251, "hypo": 10252, "sizes": 10253, "registered": 10254, "betes": 10255, "relaxing": 10256, "bloom": 10257, "intense": 10258, "valentines": 10259, "insane": 10260, "wwii": 10261, "px": 10262, "trio": 10263, "blade": 10264, "wisconsin": 10265, "cone": 10266, "platin": 10267, "alize": 10268, "raven": 10269, "increasing": 10270, "indians": 10271, "ilian": 10272, "blu": 10273, "rabbit": 10274, "extension": 10275, "jef": 10276, "audi": 10277, "ferry": 10278, "sell": 10279, "aday": 10280, "usb": 10281, "sweat": 10282, "champag": 10283, "method": 10284, "memph": 10285, "assist": 10286, "sby": 10287, "cape": 10288, "removed": 10289, "magn": 10290, "vt": 10291, "rams": 10292, "fbi": 10293, "tackle": 10294, "phew": 10295, "hon": 10296, "motorcycle": 10297, "suspec": 10298, "elephant": 10299, "subject": 10300, "lette": 10301, "dairy": 10302, "wheat": 10303, "awkward": 10304, "act": 10305, "trol": 10306, "mitted": 10307, "zayn": 10308, "sheriff": 10309, "enemy": 10310, "cons": 10311, "kett": 10312, "bulls": 10313, "evalu": 10314, "btc": 10315, "satellite": 10316, "holo": 10317, "porter": 10318, "diabetes": 10319, "better": 10320, "releasing": 10321, "surf": 10322, ":-": 10323, "sebasti": 10324, "collecting": 10325, "encing": 10326, "ethi": 10327, "gods": 10328, "alley": 10329, "healthy": 10330, "mills": 10331, "smash": 10332, "copper": 10333, "crack": 10334, "readers": 10335, "spac": 10336, "license": 10337, "basket": 10338, "bangla": 10339, "entic": 10340, "omi": 10341, "mere": 10342, "sively": 
10343, "animation": 10344, "lanes": 10345, "dentally": 10346, "chillin": 10347, "fie": 10348, "karen": 10349, "depth": 10350, "lipse": 10351, "ng": 10352, "rip": 10353, "melo": 10354, "sandy": 10355, "ðŁijıðŁijı": 10356, "vincent": 10357, "nut": 10358, "hug": 10359, "whole": 10360, "creates": 10361, "????": 10362, "âĿ¤ï¸ıâĿ¤ï¸ı": 10363, "baked": 10364, "upgrade": 10365, "roberts": 10366, "hara": 10367, "caribbean": 10368, "authentic": 10369, "mbs": 10370, "moscow": 10371, "attorney": 10372, "wiki": 10373, "chlo": 10374, "hull": 10375, "cork": 10376, "\"!": 10377, "stylish": 10378, "ðŁĵ¸:": 10379, "diary": 10380, "improving": 10381, "expand": 10382, "bright": 10383, "pollution": 10384, "knights": 10385, "personality": 10386, "checked": 10387, "facilities": 10388, "zel": 10389, "bowling": 10390, "guer": 10391, "ðŁİĤ": 10392, "ongoing": 10393, "units": 10394, "hook": 10395, "beck": 10396, "conflict": 10397, "todd": 10398, "farming": 10399, "educational": 10400, "kak": 10401, "clay": 10402, "stroke": 10403, "belly": 10404, "explore": 10405, "millenni": 10406, "thm": 10407, "loop": 10408, "sms": 10409, "consist": 10410, "circa": 10411, "bryan": 10412, "dab": 10413, "younger": 10414, "solidar": 10415, "ppa": 10416, "experienced": 10417, "bella": 10418, "board": 10419, "sheffield": 10420, "stephen": 10421, "consumer": 10422, "submit": 10423, "sponsor": 10424, "tang": 10425, "aggre": 10426, "combined": 10427, "tracking": 10428, "sanders": 10429, "baz": 10430, "survive": 10431, "ferred": 10432, "equal": 10433, "sep": 10434, "reed": 10435, "strong": 10436, "privacy": 10437, "stap": 10438, "ung": 10439, "acry": 10440, "pasta": 10441, "pirates": 10442, "ager": 10443, "fairy": 10444, "dup": 10445, "introduced": 10446, "wip": 10447, "lets": 10448, "spray": 10449, "ðŁĵº": 10450, "grew": 10451, "asts": 10452, "pittsburgh": 10453, "newyork": 10454, "joey": 10455, "lauren": 10456, "trade": 10457, "chop": 10458, "pipe": 10459, "claire": 10460, "behavior": 10461, "vap": 10462, 
"crews": 10463, "laptop": 10464, "ð٤Ĺ": 10465, "chester": 10466, "discipl": 10467, "df": 10468, "outdoors": 10469, "ks": 10470, "gover": 10471, "superstar": 10472, "casino": 10473, "farmer": 10474, ";-)": 10475, "returned": 10476, "ðŁıĪ": 10477, "mail": 10478, "roasted": 10479, "costa": 10480, "vill": 10481, "pez": 10482, "gardening": 10483, "distribution": 10484, "shining": 10485, "investors": 10486, "rasp": 10487, "decades": 10488, "realized": 10489, "barn": 10490, "pti": 10491, "stable": 10492, "utd": 10493, "panthers": 10494, "mens": 10495, "bn": 10496, "cade": 10497, "bucket": 10498, "ynn": 10499, "whenever": 10500, "wake": 10501, "dais": 10502, "bernie": 10503, "lodge": 10504, "julie": 10505, "atmosphere": 10506, "ðŁĺĺðŁĺĺ": 10507, "majority": 10508, "parti": 10509, "excit": 10510, "cut": 10511, "meh": 10512, "muslims": 10513, "begun": 10514, "flights": 10515, "veness": 10516, "ceme": 10517, "posing": 10518, "sole": 10519, "gou": 10520, "darkness": 10521, "peach": 10522, "celtic": 10523, "authority": 10524, "grandma": 10525, "fulness": 10526, "smith": 10527, "specific": 10528, "garcia": 10529, "coins": 10530, "goodness": 10531, "aldub": 10532, "recruiting": 10533, "dennis": 10534, "gary": 10535, "sleeve": 10536, "weapon": 10537, "plz": 10538, "discover": 10539, "harrison": 10540, "recruitment": 10541, "jai": 10542, "chim": 10543, "compared": 10544, "toms": 10545, "mothers": 10546, "amy": 10547, "archive": 10548, "task": 10549, "benjam": 10550, "seg": 10551, "lawyer": 10552, "alum": 10553, "investing": 10554, "mie": 10555, "chez": 10556, "jp": 10557, "ake": 10558, "flam": 10559, "wallpaper": 10560, "âĻ„ï¸ı": 10561, "tton": 10562, "chest": 10563, "favorites": 10564, "weigh": 10565, "coolest": 10566, "rating": 10567, "relevant": 10568, "logan": 10569, "maple": 10570, "runners": 10571, "prior": 10572, "people": 10573, "maur": 10574, "terrorist": 10575, "tested": 10576, "carnival": 10577, "suspen": 10578, "measure": 10579, "mv": 10580, "cybersecurity": 10581, 
"appren": 10582, "terrorism": 10583, "oz": 10584, "vital": 10585, "nies": 10586, "gonz": 10587, "funded": 10588, "twist": 10589, "assessment": 10590, "diesel": 10591, "enfor": 10592, "column": 10593, "addressing": 10594, "casts": 10595, "payment": 10596, "xton": 10597, "fier": 10598, ",'": 10599, "last": 10600, "nee": 10601, "unless": 10602, "close": 10603, "skill": 10604, "cuisine": 10605, "funeral": 10606, "tiles": 10607, "aun": 10608, "kru": 10609, "relationships": 10610, "ðŁē¯": 10611, "event": 10612, "âĢįâĻĤï¸ı": 10613, "kindness": 10614, "proposed": 10615, "acoustic": 10616, "aes": 10617, "defender": 10618, "dance": 10619, "htt": 10620, "wat": 10621, "voy": 10622, "ð٤ĺ": 10623, "aus": 10624, "cliff": 10625, "searching": 10626, "beautifully": 10627, "inqu": 10628, "atl": 10629, "specialist": 10630, "ðŁIJ¶": 10631, "dai": 10632, "trails": 10633, "classics": 10634, "instant": 10635, "vous": 10636, "revenue": 10637, "march": 10638, "kirk": 10639, "fringe": 10640, "fireworks": 10641, "trivia": 10642, "âĺħ": 10643, "traction": 10644, "walter": 10645, "moto": 10646, "lily": 10647, "attitude": 10648, "climb": 10649, "scan": 10650, "savings": 10651, "cw": 10652, "faith": 10653, "credits": 10654, "abled": 10655, "graff": 10656, "autograph": 10657, "hehe": 10658, "ranch": 10659, "had": 10660, "rogers": 10661, "ðŁĮ¹": 10662, "fin": 10663, "requ": 10664, "folk": 10665, "additional": 10666, "lynn": 10667, "uber": 10668, "dollars": 10669, "logic": 10670, "worth": 10671, "som": 10672, "thesis": 10673, "pound": 10674, "bic": 10675, "stur": 10676, "ceram": 10677, "spencer": 10678, "entered": 10679, "vamp": 10680, "organized": 10681, "âľĪ": 10682, "pps": 10683, "tron": 10684, "mercedes": 10685, "noti": 10686, "competitive": 10687, "dow": 10688, "ousness": 10689, "victor": 10690, "grilled": 10691, "nai": 10692, "putin": 10693, "abra": 10694, "blame": 10695, "alexand": 10696, "animal": 10697, "decent": 10698, "pent": 10699, "interior": 10700, ":')": 10701, "butler": 10702, 
"ballet": 10703, "ðŁēĶ": 10704, "albums": 10705, "downs": 10706, "lad": 10707, "sir": 10708, "plain": 10709, "pers": 10710, "blonde": 10711, "disc": 10712, "pakistan": 10713, "sement": 10714, "gaa": 10715, "wage": 10716, "chas": 10717, "mani": 10718, "cops": 10719, "territ": 10720, "lol": 10721, "laughter": 10722, "rivers": 10723, "magnificent": 10724, "lamp": 10725, "wb": 10726, "newsle": 10727, "charts": 10728, "blessing": 10729, "punch": 10730, "longest": 10731, "floral": 10732, "cutie": 10733, "farewell": 10734, "stopping": 10735, "mbb": 10736, "bud": 10737, "cheese": 10738, "decla": 10739, "sim": 10740, "mcdonald": 10741, "deter": 10742, "youth": 10743, "tch": 10744, "freder": 10745, "kindle": 10746, "fern": 10747, "ator": 10748, "asleep": 10749, "pond": 10750, "sprint": 10751, "pounds": 10752, "lazy": 10753, "ghe": 10754, "fundraising": 10755, "deadly": 10756, "grande": 10757, "doug": 10758, "hey": 10759, "linda": 10760, "considering": 10761, "ium": 10762, "golden": 10763, "vik": 10764, "authors": 10765, "diss": 10766, "ually": 10767, "appropriate": 10768, "morning": 10769, "yle": 10770, "honoring": 10771, "folio": 10772, "bec": 10773, "rebec": 10774, "finland": 10775, "formula": 10776, "cornwall": 10777, "shay": 10778, "causing": 10779, "blend": 10780, "signal": 10781, "tent": 10782, "kashmir": 10783, "nationals": 10784, "harmony": 10785, "scout": 10786, "accessi": 10787, "height": 10788, "medieval": 10789, "improvement": 10790, "kees": 10791, "practical": 10792, "card": 10793, "depar": 10794, "hun": 10795, "oming": 10796, "calgary": 10797, "stel": 10798, "bubble": 10799, "guru": 10800, "mah": 10801, "unexpe": 10802, "nh": 10803, "eda": 10804, "meat": 10805, "ige": 10806, "sio": 10807, "goddess": 10808, "inches": 10809, "tunes": 10810, "britt": 10811, "stion": 10812, "raj": 10813, "âĻ«": 10814, "mercy": 10815, "ðŁēĺ": 10816, "sends": 10817, "iest": 10818, "polici": 10819, "vale": 10820, "reduced": 10821, "asap": 10822, "vijay": 10823, "defensive": 10824, 
"celebrations": 10825, "riders": 10826, "meditation": 10827, "harmon": 10828, "ging": 10829, "”": 10830, "programming": 10831, "inau": 10832, "sudden": 10833, "mh": 10834, "replacement": 10835, "sku": 10836, "jar": 10837, "grades": 10838, "tast": 10839, "kitt": 10840, "branding": 10841, "kaw": 10842, "boot": 10843, "fought": 10844, "pays": 10845, "gf": 10846, "ization": 10847, "hop": 10848, "kk": 10849, "activist": 10850, "vend": 10851, "coastal": 10852, "chaos": 10853, "ðŁĶ“": 10854, "seme": 10855, "billboard": 10856, "lifting": 10857, "cumb": 10858, "scal": 10859, "ðŁĸ¤": 10860, "struck": 10861, "lv": 10862, "indiedev": 10863, "beaten": 10864, "jungle": 10865, "alright": 10866, "destiny": 10867, "ming": 10868, "kc": 10869, "chances": 10870, "oman": 10871, "qatar": 10872, "craf": 10873, "trained": 10874, "prix": 10875, "charm": 10876, "otive": 10877, "smu": 10878, "ec": 10879, "anders": 10880, "handed": 10881, "alban": 10882, "certainly": 10883, "arriving": 10884, "ize": 10885, "sai": 10886, "track": 10887, "painter": 10888, "humble": 10889, "appointment": 10890, "headline": 10891, "managing": 10892, "mod": 10893, "aspe": 10894, "andrea": 10895, "ä": 10896, "ethiop": 10897, "united": 10898, "exist": 10899, "bali": 10900, "kad": 10901, "nt": 10902, "dred": 10903, "rex": 10904, "recognize": 10905, "tampa": 10906, "beers": 10907, "atia": 10908, "heels": 10909, "note": 10910, "transportation": 10911, "turtle": 10912, "rede": 10913, "hiphop": 10914, "spicy": 10915, "spurs": 10916, "â¬ĩ": 10917, "corp": 10918, "thern": 10919, "toast": 10920, "hurry": 10921, "properties": 10922, "mage": 10923, "marco": 10924, "elements": 10925, "bouti": 10926, "syndrome": 10927, "msg": 10928, "developer": 10929, "graders": 10930, "heim": 10931, "resil": 10932, "offices": 10933, "delay": 10934, "dimen": 10935, "vintag": 10936, "barbara": 10937, "ðŁĺ±": 10938, "venezu": 10939, "cular": 10940, "faced": 10941, "barn": 10942, "ðŁĺĨ": 10943, "survivor": 10944, "worm": 10945, "confused": 10946, 
"passionate": 10947, "ر": 10948, "identify": 10949, "electricity": 10950, "souls": 10951, "bradley": 10952, "reportedly": 10953, "lunch": 10954, "shelf": 10955, "elia": 10956, "sweet": 10957, "smooth": 10958, "employment": 10959, "amel": 10960, "manhattan": 10961, "steam": 10962, "ounts": 10963, "yep": 10964, "living": 10965, "une": 10966, "describe": 10967, "cares": 10968, "manila": 10969, "shawn": 10970, "acted": 10971, "bash": 10972, "steven": 10973, "rest": 10974, "petition": 10975, "divine": 10976, "welsh": 10977, "race": 10978, "platinum": 10979, "ðŁĮ¸": 10980, "pb": 10981, "extraordinary": 10982, "solidarity": 10983, "mall": 10984, "onion": 10985, "scheduled": 10986, "gameof": 10987, "fergu": 10988, "dems": 10989, "norm": 10990, "pk": 10991, "trials": 10992, "policies": 10993, "publishing": 10994, "stole": 10995, "front": 10996, "character": 10997, "vania": 10998, "exce": 10999, "stie": 11000, "sca": 11001, "residential": 11002, "sailing": 11003, "ðŁĶ„ðŁĶ„ðŁĶ„": 11004, "sponsors": 11005, "thick": 11006, "champagne": 11007, "shepher": 11008, "continuing": 11009, "venice": 11010, "perth": 11011, "nap": 11012, "aster": 11013, "yak": 11014, "unlimited": 11015, "choices": 11016, "neo": 11017, "hiv": 11018, "reporter": 11019, "brussels": 11020, "fold": 11021, "dys": 11022, "semi": 11023, "lawn": 11024, "italia": 11025, "wifi": 11026, "ask": 11027, "emed": 11028, "frame": 11029, "monitoring": 11030, "stead": 11031, "ida": 11032, "grin": 11033, "isa": 11034, "flip": 11035, "restric": 11036, "offensive": 11037, "attached": 11038, "dish": 11039, "why": 11040, "phillips": 11041, "greet": 11042, "pals": 11043, "mixtape": 11044, "vou": 11045, "fielder": 11046, "spark": 11047, "alberta": 11048, "glen": 11049, "cash": 11050, "sri": 11051, "uri": 11052, "rodri": 11053, "entrepreneurs": 11054, "climatechange": 11055, "psy": 11056, "dle": 11057, "ements": 11058, "linked": 11059, "netherlands": 11060, "accidentally": 11061, "opposition": 11062, "velvet": 11063, "rays": 11064, 
"cw": 11065, "omo": 11066, "mf": 11067, "lmfao": 11068, "newsletter": 11069, ":)": 11070, "toilet": 11071, "literature": 11072, "disp": 11073, "philip": 11074, "uniform": 11075, "suddenly": 11076, "header": 11077, "cooler": 11078, "---": 11079, "proud": 11080, "brig": 11081, "nissan": 11082, "scientist": 11083, "jah": 11084, "concentr": 11085, "packs": 11086, "appointed": 11087, "soap": 11088, "engage": 11089, "chose": 11090, "âĻ”": 11091, "setup": 11092, "jealous": 11093, "harry": 11094, "gation": 11095, "tunnel": 11096, "temp": 11097, "oscars": 11098, "decade": 11099, "recommended": 11100, "children": 11101, "aba": 11102, "anxiety": 11103, "vements": 11104, "salon": 11105, "photoo": 11106, "organiz": 11107, "machines": 11108, "abs": 11109, "ville": 11110, "hype": 11111, "tiff": 11112, "emerging": 11113, "avgeek": 11114, "[#": 11115, "contribution": 11116, "brady": 11117, "resto": 11118, "gmail": 11119, "fitz": 11120, "photoshoot": 11121, "helmet": 11122, "ht": 11123, "elegant": 11124, "uganda": 11125, "nursing": 11126, "orleans": 11127, "penn": 11128, "nah": 11129, "footage": 11130, "ema": 11131, "wo": 11132, "wad": 11133, "concerns": 11134, "vere": 11135, "remark": 11136, "whoever": 11137, "strang": 11138, "pt": 11139, "quit": 11140, "shang": 11141, "history": 11142, "sick": 11143, "permanent": 11144, "illness": 11145, "cold": 11146, "vision": 11147, "hem": 11148, "arrow": 11149, "convic": 11150, "pink": 11151, "occup": 11152, "bald": 11153, "exhau": 11154, "uof": 11155, "amo": 11156, "ont": 11157, "ãĄ»": 11158, "adopt": 11159, "laid": 11160, "smoked": 11161, "interpre": 11162, "essenti": 11163, "associated": 11164, "bd": 11165, "bby": 11166, "fier": 11167, "install": 11168, "diplom": 11169, "conditi": 11170, "cf": 11171, "wak": 11172, "anya": 11173, "graci": 11174, "fisher": 11175, "sss": 11176, "apr": 11177, "ilit": 11178, "musician": 11179, "symphony": 11180, "cord": 11181, "hack": 11182, "legi": 11183, "lv": 11184, "blessings": 11185, "humor": 11186, "scra": 
11187, "eti": 11188, "minster": 11189, "travelling": 11190, "bush": 11191, "jewellery": 11192, "lime": 11193, "!!!": 11194, "pregnant": 11195, "pee": 11196, "lob": 11197, "capital": 11198, "ipa": 11199, "pencil": 11200, "labor": 11201, "ducks": 11202, "proudly": 11203, "wedding": 11204, "derek": 11205, "mw": 11206, "peg": 11207, "valentine": 11208, "angu": 11209, "retreat": 11210, "prospect": 11211, "danger": 11212, "vulner": 11213, "upset": 11214, ",#": 11215, "srk": 11216, "xim": 11217, "thursday": 11218, "nfl": 11219, "kisses": 11220, "reds": 11221, "crack": 11222, "reward": 11223, "cu": 11224, "kok": 11225, "mete": 11226, "abandoned": 11227, "itt": 11228, "meals": 11229, "spell": 11230, "stanbul": 11231, "delays": 11232, "rum": 11233, "leop": 11234, "gum": 11235, "nova": 11236, "superman": 11237, "chick": 11238, "mis": 11239, "dramatic": 11240, "innocent": 11241, "rounds": 11242, "rec": 11243, "autism": 11244, "bangladesh": 11245, "moral": 11246, "movie": 11247, "spoo": 11248, "kla": 11249, "âĄ£": 11250, "outing": 11251, "messi": 11252, "abroad": 11253, "lookin": 11254, "aim": 11255, "qi": 11256, "stack": 11257, "collage": 11258, "Ć ĀÆ": 11259, "hudson": 11260, "scan": 11261, "hoe": 11262, "chau": 11263, "occur": 11264, "commander": 11265, "holes": 11266, "ðŁİĦ": 11267, "bias": 11268, "von": 11269, "sticker": 11270, "mak": 11271, "responsibility": 11272, "columbus": 11273, "saint": 11274, "edmon": 11275, "racism": 11276, "farms": 11277, "wen": 11278, "gulf": 11279, "mayo": 11280, "!!!!!!!!": 11281, "corporation": 11282, "bachel": 11283, "ela": 11284, "internal": 11285, "jeep": 11286, "follows": 11287, "dialogue": 11288, "derer": 11289, "smartphone": 11290, "helen": 11291, "richmond": 11292, "equity": 11293, "sland": 11294, "bg": 11295, "near": 11296, "avi": 11297, "memphis": 11298, "weir": 11299, "discussed": 11300, "badge": 11301, "pup": 11302, "mistake": 11303, "phenomen": 11304, "unite": 11305, "ðŁĽ": 11306, "depic": 11307, "rides": 11308, "inaugu": 11309, 
"nat": 11310, "softwitter": 11311, "combination": 11312, "gospel": 11313, "âļ¾": 11314, "admission": 11315, "retrogaming": 11316, "ðŁIJ¾": 11317, "schu": 11318, "mbo": 11319, "junction": 11320, "alarm": 11321, "à¦": 11322, "grac": 11323, "khali": 11324, "kul": 11325, "male": 11326, "caption": 11327, "wish": 11328, "tere": 11329, "corps": 11330, "rubber": 11331, "playstation": 11332, "erin": 11333, "efficient": 11334, "lor": 11335, "jokes": 11336, "inary": 11337, "norman": 11338, "luis": 11339, "inaugural": 11340, "ched": 11341, "âļ½ï¸ı": 11342, "dip": 11343, "toe": 11344, "strat": 11345, "aac": 11346, "amu": 11347, "pier": 11348, "cott": 11349, "command": 11350, "tten": 11351, "snoo": 11352, "cube": 11353, "closes": 11354, "classical": 11355, "sword": 11356, "expression": 11357, "reaching": 11358, "napp": 11359, "cost": 11360, "affect": 11361, "rico": 11362, "gif": 11363, "breathe": 11364, "tribe": 11365, "ortho": 11366, "hay": 11367, "lg": 11368, "fries": 11369, "nm": 11370, "hiding": 11371, "richards": 11372, "ende": 11373, "micro": 11374, "capitol": 11375, "copy": 11376, "rom": 11377, "regime": 11378, "maryland": 11379, "taxi": 11380, "dial": 11381, "embarra": 11382, "unbeliev": 11383, "cht": 11384, "vs": 11385, "elimin": 11386, "odd": 11387, "penny": 11388, "soundtrack": 11389, "lings": 11390, "transition": 11391, "remaining": 11392, "ais": 11393, "malik": 11394, "?!?": 11395, "random": 11396, "defend": 11397, "ultra": 11398, "trum": 11399, "dancer": 11400, "stol": 11401, "drive": 11402, "aver": 11403, "roast": 11404, "definition": 11405, "sean": 11406, "excitement": 11407, "particul": 11408, "surely": 11409, "shav": 11410, "bery": 11411, "dishes": 11412, "comm": 11413, "isol": 11414, "iam": 11415, "obli": 11416, "ghost": 11417, "hughes": 11418, "chiefs": 11419, "bas": 11420, "conservative": 11421, "special": 11422, "femin": 11423, "shri": 11424, "nancy": 11425, "intel": 11426, "tune": 11427, "ðŁĩª": 11428, "joel": 11429, "ggle": 11430, "moto": 11431, "ðŁĺĶ": 
11432, "buck": 11433, "dag": 11434, "anticip": 11435, "montana": 11436, "guid": 11437, "frog": 11438, "ecraft": 11439, "ope": 11440, "drives": 11441, "numer": 11442, "xy": 11443, "colorful": 11444, "wednesdaywisdom": 11445, "illumin": 11446, "beyon": 11447, "inaugur": 11448, "deeply": 11449, "prefer": 11450, "fortune": 11451, "cooked": 11452, "tible": 11453, "âĺķ": 11454, "sweater": 11455, "itter": 11456, "tty": 11457, "ui": 11458, "gie": 11459, "complic": 11460, "~~": 11461, "taxes": 11462, "cups": 11463, "diverse": 11464, "samanth": 11465, "âłĢâłĢ": 11466, "baking": 11467, "symp": 11468, "wai": 11469, "behalf": 11470, "mercur": 11471, "travels": 11472, "ðŁİīðŁİ": 11473, "oria": 11474, "engaged": 11475, "jumping": 11476, "retired": 11477, "naked": 11478, "puni": 11479, "speedway": 11480, "sciences": 11481, "rehearsal": 11482, "onym": 11483, "dyou": 11484, "plates": 11485, "rati": 11486, "krish": 11487, "jazz": 11488, "carol": 11489, "raf": 11490, "penalty": 11491, "timeline": 11492, "ruby": 11493, "engineers": 11494, "raf": 11495, "belle": 11496, "dose": 11497, "cheon": 11498, "escap": 11499, "meg": 11500, "rank": 11501, "ord": 11502, "megan": 11503, "merch": 11504, "eclipse": 11505, "âĺºï¸ı": 11506, "pledge": 11507, "kirk": 11508, "persi": 11509, "leicester": 11510, "sak": 11511, "wk": 11512, "safely": 11513, "yyy": 11514, "jet": 11515, "promised": 11516, "jc": 11517, "enne": 11518, "noah": 11519, "reno": 11520, "rea": 11521, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 11522, "trail": 11523, "ðŁijĢ": 11524, "fd": 11525, "sooo": 11526, "rimin": 11527, "wk": 11528, "า": 11529, "ial": 11530, "xox": 11531, "biscu": 11532, "dale": 11533, "fandom": 11534, "participating": 11535, "flag": 11536, "privilege": 11537, "peach": 11538, "machine": 11539, "boston": 11540, "gross": 11541, "og": 11542, "miracle": 11543, "adoption": 11544, "uss": 11545, "monsters": 11546, "beij": 11547, "clarke": 11548, "pushing": 11549, "praying": 11550, "aro": 11551, "dn": 11552, "ellis": 11553, "apollo": 11554, 
"odds": 11555, "refugee": 11556, "tow": 11557, "bp": 11558, "ðŁĩ¬ðŁĩ§": 11559, "hend": 11560, "appeared": 11561, "membership": 11562, "pean": 11563, "dum": 11564, "violent": 11565, "vy": 11566, "potatoes": 11567, "aww": 11568, "greetings": 11569, "tts": 11570, "acon": 11571, "shane": 11572, "photographed": 11573, "crab": 11574, "temperatures": 11575, "cuba": 11576, "cfc": 11577, "welcom": 11578, "hel": 11579, "innings": 11580, "mk": 11581, "code": 11582, "knock": 11583, "grass": 11584, "swedish": 11585, "pta": 11586, "icky": 11587, "vat": 11588, "lining": 11589, "sq": 11590, "sap": 11591, "arc": 11592, "announcing": 11593, "skins": 11594, "cityof": 11595, "bring": 11596, "cox": 11597, "gamer": 11598, "itarian": 11599, "ida": 11600, "hd": 11601, "rosse": 11602, "sadly": 11603, "geo": 11604, "âļ”ï¸ı": 11605, "tags": 11606, "father": 11607, "change": 11608, "lance": 11609, "whiskey": 11610, "adelaide": 11611, "tec": 11612, "stickers": 11613, "market": 11614, "classy": 11615, "badass": 11616, "florence": 11617, "liner": 11618, "frost": 11619, "kate": 11620, "acon": 11621, "scandal": 11622, "essex": 11623, "ðŁĺı": 11624, "vivi": 11625, "drill": 11626, "bloggers": 11627, "recommend": 11628, "dha": 11629, "acres": 11630, "roma": 11631, "buy": 11632, "grocer": 11633, "eria": 11634, "mahar": 11635, "ffer": 11636, "patterns": 11637, "veri": 11638, "compu": 11639, "stev": 11640, "anga": 11641, "mentor": 11642, "doo": 11643, "itali": 11644, "cdnpoli": 11645, "only": 11646, "conduct": 11647, "electro": 11648, "def": 11649, "whale": 11650, "preparation": 11651, "bicycle": 11652, "viral": 11653, "turnout": 11654, "brass": 11655, "quad": 11656, "hospitality": 11657, "packaging": 11658, "dency": 11659, "cemetery": 11660, "aboard": 11661, "dreaming": 11662, "picture": 11663, "tall": 11664, "invent": 11665, "admi": 11666, "oe": 11667, "temps": 11668, "quan": 11669, "fundam": 11670, "promp": 11671, "residence": 11672, "mud": 11673, "souri": 11674, "âĦ¢": 11675, "graffiti": 11676, 
"gif": 11677, "dnd": 11678, "comp": 11679, "swar": 11680, "peeps": 11681, "palestine": 11682, "devils": 11683, "sang": 11684, "assistance": 11685, "bike": 11686, "mississi": 11687, "interviewed": 11688, "nephew": 11689, "drums": 11690, "vand": 11691, "gentlemen": 11692, "nsw": 11693, "insta": 11694, "lebanon": 11695, "eeee": 11696, "olivia": 11697, "very": 11698, "rough": 11699, "industries": 11700, "mation": 11701, "ðŁĺē": 11702, "barrel": 11703, "nay": 11704, "pops": 11705, "modern": 11706, "illy": 11707, "arest": 11708, "onents": 11709, "protecting": 11710, "vans": 11711, "eo": 11712, "vikings": 11713, "restaurants": 11714, "reck": 11715, "jackie": 11716, "andrew": 11717, "willing": 11718, "heath": 11719, "citizen": 11720, "discrimin": 11721, "à¹Ī": 11722, "stuart": 11723, "mys": 11724, "hip": 11725, "transp": 11726, "\"?": 11727, "tex": 11728, "sushi": 11729, "ked": 11730, "crossed": 11731, "distur": 11732, "pedia": 11733, "fate": 11734, "somehow": 11735, "moth": 11736, "processing": 11737, "iss": 11738, "rin": 11739, "uts": 11740, "yyc": 11741, "vert": 11742, "lgbt": 11743, "reid": 11744, "onto": 11745, "arabia": 11746, "habitat": 11747, "==": 11748, "streak": 11749, "simpson": 11750, "addiction": 11751, "wimble": 11752, "delivers": 11753, "challenging": 11754, "ðŁİ¶": 11755, "franch": 11756, "edu": 11757, "sme": 11758, "aids": 11759, "hurst": 11760, "tham": 11761, "tarian": 11762, "remembered": 11763, "palestinian": 11764, "fees": 11765, "trum": 11766, "sketch": 11767, "uru": 11768, "fitting": 11769, "jesse": 11770, "ðŁĶ„ðŁĶ„": 11771, "--------": 11772, "bach": 11773, "icia": 11774, "colored": 11775, "dah": 11776, "associate": 11777, "intel": 11778, "seller": 11779, "pu": 11780, "stuffed": 11781, "acs": 11782, "bs": 11783, "shin": 11784, "cooperation": 11785, "certificate": 11786, "abu": 11787, "ingredients": 11788, "rev": 11789, "inge": 11790, "elder": 11791, "christian": 11792, "bundle": 11793, "thic": 11794, "dirt": 11795, "beijing": 11796, "commit": 
11797, "teddy": 11798, "edu": 11799, "today": 11800, "sfield": 11801, "wyn": 11802, "confirms": 11803, "loo": 11804, "jv": 11805, "eness": 11806, "alpha": 11807, "virus": 11808, "arium": 11809, "grind": 11810, "bridges": 11811, "introduction": 11812, "polls": 11813, "bacter": 11814, "zach": 11815, "terminal": 11816, "raiders": 11817, "flavor": 11818, "zombie": 11819, "vod": 11820, "spreading": 11821, "gameofthrones": 11822, "efficiency": 11823, "lately": 11824, "alem": 11825, "tweet": 11826, "crimes": 11827, "cler": 11828, "dey": 11829, "dged": 11830, "hyun": 11831, "payments": 11832, "circus": 11833, "ðŁĺŃðŁĺŃ": 11834, "missouri": 11835, "lub": 11836, "episodes": 11837, "cage": 11838, "pos": 11839, "matching": 11840, "tumblr": 11841, "lined": 11842, "gest": 11843, "ambi": 11844, "narr": 11845, "ington": 11846, "regul": 11847, "blown": 11848, "isle": 11849, "coco": 11850, "ondon": 11851, "joshua": 11852, "touring": 11853, "sma": 11854, "sausage": 11855, "bestfriend": 11856, "boeing": 11857, "desire": 11858, "savage": 11859, "rapper": 11860, "devo": 11861, "tear": 11862, "takeover": 11863, "cowboys": 11864, "poker": 11865, "parag": 11866, "ppe": 11867, "hint": 11868, "wears": 11869, "seth": 11870, "roles": 11871, "lanc": 11872, "manga": 11873, "format": 11874, "flyer": 11875, "cay": 11876, "moor": 11877, "bake": 11878, "splash": 11879, "vad": 11880, "kerala": 11881, "proceeds": 11882, "silly": 11883, "reflection": 11884, "distr": 11885, "wid": 11886, "suit": 11887, "civic": 11888, "yankees": 11889, "byn": 11890, "migration": 11891, "distin": 11892, "orch": 11893, "femini": 11894, "qualifying": 11895, "turi": 11896, "obe": 11897, "hundred": 11898, "crap": 11899, "wang": 11900, "mathemat": 11901, "bure": 11902, "exposure": 11903, "ferguson": 11904, "semester": 11905, "reserv": 11906, "plym": 11907, "ahu": 11908, "facial": 11909, "wax": 11910, "worried": 11911, "cab": 11912, "vio": 11913, "asa": 11914, "cod": 11915, "topics": 11916, "pcs": 11917, "halo": 11918, 
"rescued": 11919, "horizon": 11920, "ark": 11921, "âļª": 11922, "holly": 11923, "elf": 11924, "ulti": 11925, "pup": 11926, "qualified": 11927, "attendance": 11928, "atively": 11929, "destroy": 11930, "yc": 11931, "forth": 11932, "photooftheday": 11933, "cents": 11934, "iceland": 11935, "measures": 11936, "desk": 11937, "portfolio": 11938, "articles": 11939, "directors": 11940, "datab": 11941, "ew": 11942, "creepy": 11943, "ounding": 11944, "honoured": 11945, "mist": 11946, "jit": 11947, "mentioned": 11948, "portable": 11949, "itic": 11950, "dann": 11951, "fridayfeeling": 11952, "amid": 11953, "tiger": 11954, "scrip": 11955, "helicopter": 11956, "hardware": 11957, "explor": 11958, "workplace": 11959, "austria": 11960, "beatles": 11961, "bernar": 11962, "spider": 11963, "disco": 11964, "cult": 11965, "limits": 11966, "shortly": 11967, "final": 11968, "ninja": 11969, "luke": 11970, "lebron": 11971, "walmart": 11972, "oil": 11973, "vanilla": 11974, "shire": 11975, "yeg": 11976, "aky": 11977, "cs": 11978, "bler": 11979, "collected": 11980, "tg": 11981, "rolled": 11982, "specials": 11983, "bff": 11984, "pierre": 11985, "shim": 11986, "vier": 11987, "flashback": 11988, "restoration": 11989, "individuals": 11990, "prod": 11991, "freaking": 11992, "turer": 11993, "oa": 11994, "refre": 11995, "moroc": 11996, "greet": 11997, "reyn": 11998, "careful": 11999, "ouring": 12000, "ush": 12001, "isd": 12002, "gill": 12003, "view": 12004, "thunderstorm": 12005, "bled": 12006, "picnic": 12007, "guardi": 12008, "pig": 12009, "ark": 12010, "sylvania": 12011, "banned": 12012, "ucl": 12013, "vijay": 12014, "orium": 12015, "avengers": 12016, "believes": 12017, "eur": 12018, "monument": 12019, "concerned": 12020, "labs": 12021, "berg": 12022, "aap": 12023, "vish": 12024, "singles": 12025, "cancel": 12026, "zel": 12027, "arab": 12028, "ruth": 12029, "tooth": 12030, "arta": 12031, "shaf": 12032, "chairs": 12033, "rack": 12034, "diseases": 12035, "crowd": 12036, "cly": 12037, "flex": 12038, 
"christma": 12039, "artificial": 12040, "tomat": 12041, "fine": 12042, "draws": 12043, "advocate": 12044, "france": 12045, "ƙĬ": 12046, "ðŁĺ³": 12047, "heavy": 12048, "sour": 12049, "comprehen": 12050, "noble": 12051, "aap": 12052, "hindu": 12053, "coral": 12054, "gars": 12055, "owen": 12056, "nl": 12057, "stall": 12058, "yellow": 12059, "marina": 12060, "inver": 12061, "support": 12062, "tough": 12063, "promises": 12064, "pie": 12065, "masterpiece": 12066, "score": 12067, "force": 12068, "mortg": 12069, "cryptocurrency": 12070, "ox": 12071, "rors": 12072, "rockin": 12073, "provin": 12074, "hog": 12075, "nostal": 12076, "oakland": 12077, "patrick": 12078, "inclusion": 12079, "traffic": 12080, "ahmed": 12081, "aha": 12082, "luxury": 12083, "consecu": 12084, "demon": 12085, "âĸº": 12086, "blowing": 12087, "stag": 12088, ":\"": 12089, "encourage": 12090, "bene": 12091, "skull": 12092, "dodge": 12093, "buster": 12094, "kinson": 12095, "witne": 12096, "error": 12097, "lowest": 12098, "fellow": 12099, "à°": 12100, "shre": 12101, "blur": 12102, "virgin": 12103, "composer": 12104, "slip": 12105, "mornings": 12106, "gains": 12107, "table": 12108, "grain": 12109, "arist": 12110, "brazilian": 12111, "wwe": 12112, "tues": 12113, "ribbon": 12114, "anag": 12115, "dist": 12116, "sacrif": 12117, "embrace": 12118, "entrepreneur": 12119, "affili": 12120, "deo": 12121, "tali": 12122, "tourist": 12123, "fatal": 12124, "ƬĬ": 12125, "automatic": 12126, "ðŁĩµ": 12127, "weak": 12128, "welfare": 12129, "confirm": 12130, "benjamin": 12131, "fights": 12132, "alleged": 12133, "mead": 12134, "struggling": 12135, "prosecu": 12136, "chef": 12137, "è": 12138, "proposal": 12139, "ern": 12140, "ðŁĺĦ": 12141, "dyk": 12142, "ongs": 12143, "hong": 12144, "mack": 12145, "melon": 12146, "onent": 12147, "rush": 12148, "dap": 12149, "toler": 12150, "propag": 12151, "cze": 12152, "translation": 12153, "wallet": 12154, "cottage": 12155, "sail": 12156, "constitution": 12157, "ðŁēĢ": 12158, "munici": 12159, 
"favor": 12160, "stormhour": 12161, "ih": 12162, "ðŁĺĮ": 12163, "approaching": 12164, "pinned": 12165, "jed": 12166, "nigerian": 12167, "nach": 12168, "shat": 12169, "particularly": 12170, "mcdon": 12171, "cameras": 12172, "annie": 12173, "administr": 12174, "heat": 12175, "electrical": 12176, "charming": 12177, "gibson": 12178, "boutique": 12179, "exposed": 12180, "actor": 12181, "pillow": 12182, "beaches": 12183, "genuine": 12184, "margaret": 12185, "bennett": 12186, "louisi": 12187, "positions": 12188, "ely": 12189, "shiny": 12190, "tention": 12191, "architect": 12192, "rental": 12193, "acqui": 12194, "google": 12195, "subway": 12196, "moment": 12197, "ðŁļ¨": 12198, "rim": 12199, "methods": 12200, "cycli": 12201, "norfolk": 12202, "ÙĪ": 12203, "overwhel": 12204, "rapid": 12205, "wear": 12206, "happybirthday": 12207, "progressive": 12208, "ðŁē„": 12209, "cogn": 12210, "papa": 12211, "fool": 12212, "philosophy": 12213, "polar": 12214, "jimmy": 12215, "wig": 12216, "ðŁēĭ": 12217, "operating": 12218, "reduction": 12219, "phi": 12220, "flags": 12221, "tothe": 12222, "odi": 12223, "ares": 12224, "koo": 12225, "kang": 12226, "arkansas": 12227, "ashton": 12228, "wimbledon": 12229, "scifi": 12230, "attractive": 12231, "mississippi": 12232, "logists": 12233, "ralph": 12234, "label": 12235, "graduates": 12236, "maha": 12237, "hometown": 12238, "âľĮï¸ı": 12239, "founded": 12240, "onthe": 12241, "liz": 12242, "transl": 12243, "minimum": 12244, "presti": 12245, "tam": 12246, "generations": 12247, "rebel": 12248, "journalists": 12249, "param": 12250, "mcm": 12251, "acrylic": 12252, "deaths": 12253, "tesla": 12254, "wt": 12255, "bryant": 12256, "jerus": 12257, "istanbul": 12258, "muhammad": 12259, "riley": 12260, "kris": 12261, "workshops": 12262, "iso": 12263, "counts": 12264, "stret": 12265, "protected": 12266, "trinity": 12267, "manual": 12268, "rhin": 12269, "ril": 12270, "pleasant": 12271, "lemon": 12272, "nerd": 12273, "harder": 12274, "darren": 12275, "bury": 12276, 
"rah": 12277, "basis": 12278, "migu": 12279, "occasion": 12280, "lists": 12281, "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 12282, "eb": 12283, "decre": 12284, "hampton": 12285, "ìĿ“": 12286, "travis": 12287, "transform": 12288, "puerto": 12289, "nhl": 12290, "avoc": 12291, "trips": 12292, "unexpected": 12293, "vet": 12294, "didyou": 12295, "barber": 12296, "stages": 12297, "mson": 12298, "represented": 12299, "fort": 12300, "lal": 12301, "pple": 12302, "nicely": 12303, "ignore": 12304, "quil": 12305, "quinn": 12306, "hk": 12307, "carrier": 12308, "reminded": 12309, "among": 12310, "passenger": 12311, "ellen": 12312, "guez": 12313, "scape": 12314, "mural": 12315, "youngest": 12316, "mash": 12317, "dill": 12318, "routine": 12319, "stainless": 12320, "jackson": 12321, "gandhi": 12322, "thal": 12323, "oners": 12324, "editorial": 12325, "conversations": 12326, "sdale": 12327, "automation": 12328, "ike": 12329, "าà¸": 12330, "ðŁĩª": 12331, "haul": 12332, "laying": 12333, "mentions": 12334, "amen": 12335, "abortion": 12336, "ibi": 12337, "counties": 12338, "catherine": 12339, "mands": 12340, "jame": 12341, "roller": 12342, "aut": 12343, "nam": 12344, "ological": 12345, "ception": 12346, "ranking": 12347, "toxic": 12348, "snacks": 12349, "victorian": 12350, "bangkok": 12351, "psychology": 12352, "reg": 12353, "angela": 12354, "respond": 12355, "style": 12356, "sophie": 12357, "dakota": 12358, "achieved": 12359, "marked": 12360, "imperial": 12361, "inas": 12362, "gloves": 12363, "slim": 12364, "confident": 12365, "attacked": 12366, "gger": 12367, "lonely": 12368, "valentinesday": 12369, "reb": 12370, "craftbeer": 12371, "origin": 12372, "zimbab": 12373, "ceiling": 12374, "teens": 12375, "otherwise": 12376, "wb": 12377, "fers": 12378, "daysof": 12379, "advisor": 12380, "yah": 12381, "âĻª": 12382, "ender": 12383, "republicans": 12384, "ava": 12385, "skirt": 12386, "pipel": 12387, "chie": 12388, "jane": 12389, "jax": 12390, "ðŁĺĭ": 12391, "âľĬ": 12392, "jays": 12393, "brett": 12394, "balo": 12395, 
"crucial": 12396, "dhar": 12397, "asis": 12398, "deau": 12399, "lloyd": 12400, "chatting": 12401, "âĿĦï¸ı": 12402, "relay": 12403, "remarkable": 12404, "ns": 12405, "wet": 12406, "brisbane": 12407, "ðŁĶ“": 12408, "tionally": 12409, "fk": 12410, "layer": 12411, "household": 12412, "consecutive": 12413, "esis": 12414, "pendant": 12415, "stir": 12416, "critic": 12417, "sugar": 12418, "photoshop": 12419, "pares": 12420, "artistic": 12421, "dodgers": 12422, "cun": 12423, "crafted": 12424, "amend": 12425, "boat": 12426, "âŃIJï¸ı": 12427, "egyptian": 12428, "saw": 12429, "trage": 12430, "smaller": 12431, "oxy": 12432, "paired": 12433, "next": 12434, "ires": 12435, "taco": 12436, "oy": 12437, "uc": 12438, "sti": 12439, "aerial": 12440, "://": 12441, "dro": 12442, "dotcom": 12443, "ggins": 12444, "rpg": 12445, "aye": 12446, "lean": 12447, "striker": 12448, "lobby": 12449, "protests": 12450, "priority": 12451, "congress": 12452, "amate": 12453, "invit": 12454, "rington": 12455, "mommy": 12456, "thus": 12457, "allowing": 12458, "pioneer": 12459, "enforcement": 12460, "gori": 12461, "talk": 12462, "drag": 12463, "dumb": 12464, "bullet": 12465, "sange": 12466, "ery": 12467, "targets": 12468, "ðŁĩ¦": 12469, "heather": 12470, "consider": 12471, "seafood": 12472, "vest": 12473, "risks": 12474, "%.": 12475, "pg": 12476, "sacred": 12477, "heating": 12478, "kicked": 12479, "ttot": 12480, ".-": 12481, "chandi": 12482, "coven": 12483, "pool": 12484, "pulse": 12485, "ia": 12486, "roster": 12487, "shakespeare": 12488, "esa": 12489, "cargo": 12490, "peanut": 12491, "troop": 12492, "action": 12493, "tablet": 12494, "homework": 12495, "castle": 12496, "struction": 12497, "musicians": 12498, "freezing": 12499, "butt": 12500, "justinbieber": 12501, "jj": 12502, "bahrain": 12503, "anthem": 12504, "audit": 12505, "didyouknow": 12506, "navig": 12507, "guidance": 12508, "âĸ¶": 12509, "turf": 12510, "nun": 12511, "fications": 12512, "yemen": 12513, "charging": 12514, "xc": 12515, "broncos": 12516, 
"subur": 12517, "pale": 12518, "boring": 12519, "amongst": 12520, "forthe": 12521, "emper": 12522, "omfg": 12523, "pj": 12524, "expecting": 12525, "ðŁē«": 12526, "stl": 12527, "admin": 12528, "expectations": 12529, "swan": 12530, "shoot": 12531, "ooooo": 12532, "minent": 12533, "ãĢIJ": 12534, "wallace": 12535, "stang": 12536, "saturday": 12537, "adopted": 12538, "doubles": 12539, "homie": 12540, "omez": 12541, "dhan": 12542, "venture": 12543, "surrounding": 12544, "file": 12545, "mobility": 12546, "dees": 12547, "wski": 12548, "brooke": 12549, "embro": 12550, "remembers": 12551, "kara": 12552, "testim": 12553, "botan": 12554, "mtv": 12555, "sacrifice": 12556, "jerusalem": 12557, "dl": 12558, "“": 12559, "properly": 12560, "ilion": 12561, "asi": 12562, "legit": 12563, "cope": 12564, "mcla": 12565, "recycling": 12566, "larger": 12567, "ðŁēĵ": 12568, "patric": 12569, "generous": 12570, "jared": 12571, "pf": 12572, "molly": 12573, "thomas": 12574, "judges": 12575, "hb": 12576, "sorts": 12577, "blvd": 12578, "oven": 12579, "entering": 12580, "planes": 12581, "beet": 12582, "integration": 12583, "booked": 12584, "freed": 12585, "vern": 12586, "ashes": 12587, "topped": 12588, "depot": 12589, "welcomed": 12590, "rena": 12591, "mick": 12592, "dand": 12593, "seeks": 12594, "gamer": 12595, "rankings": 12596, "rene": 12597, "mut": 12598, "whisky": 12599, "firefighters": 12600, "gues": 12601, "gather": 12602, "tourney": 12603, "demen": 12604, "yang": 12605, "newton": 12606, "automotive": 12607, "backyard": 12608, "detailed": 12609, "mist": 12610, "tobac": 12611, "fiber": 12612, "unusual": 12613, "gratitude": 12614, "spare": 12615, "neys": 12616, ":*": 12617, "peri": 12618, "floating": 12619, "finalist": 12620, "donating": 12621, "dress": 12622, "broad": 12623, "bethe": 12624, "economics": 12625, "taiwan": 12626, "edwards": 12627, "plug": 12628, "prairi": 12629, "valen": 12630, "baba": 12631, "fad": 12632, "anas": 12633, "harper": 12634, "disorder": 12635, "applied": 12636, 
"patt": 12637, "bikin": 12638, "liver": 12639, "curi": 12640, "caroline": 12641, "anner": 12642, "julian": 12643, "walking": 12644, "malcol": 12645, "screenshot": 12646, "coding": 12647, "skincare": 12648, "activists": 12649, "mysterious": 12650, "exact": 12651, "blocking": 12652, "mercury": 12653, "batter": 12654, "dump": 12655, "âľĮ": 12656, "ense": 12657, "lish": 12658, "ridiculous": 12659, "protesters": 12660, "ðŁĻĪ": 12661, "lust": 12662, "sweat": 12663, "ass": 12664, "alike": 12665, "cody": 12666, "rements": 12667, "winds": 12668, "aspir": 12669, "vienna": 12670, "pray": 12671, "...@": 12672, "boi": 12673, "candle": 12674, "assists": 12675, "tee": 12676, "derson": 12677, "pony": 12678, "fence": 12679, "conspir": 12680, "âĺħâĺħ": 12681, "ooth": 12682, "epic": 12683, "barely": 12684, "aunt": 12685, "bam": 12686, "diamonds": 12687, "endless": 12688, "screens": 12689, "cancer": 12690, "gro": 12691, "pst": 12692, "prospec": 12693, "mosque": 12694, "helpful": 12695, "ouri": 12696, "brother": 12697, "gujar": 12698, "cristi": 12699, "inez": 12700, "towers": 12701, "addresses": 12702, "gray": 12703, "burton": 12704, "retweeted": 12705, "ð٤Ķ": 12706, "nity": 12707, "duck": 12708, "supervis": 12709, "joan": 12710, "kinder": 12711, "sanctu": 12712, "pied": 12713, "âı°": 12714, "łï¸ı": 12715, "mati": 12716, "revenge": 12717, "cester": 12718, "elife": 12719, "designers": 12720, "backed": 12721, "boli": 12722, "weight": 12723, "couch": 12724, "sures": 12725, "sits": 12726, "shrimp": 12727, "lagos": 12728, "authorities": 12729, "osity": 12730, "holly": 12731, "computing": 12732, "factors": 12733, "abe": 12734, "panels": 12735, "ramad": 12736, "sentence": 12737, "mission": 12738, "holm": 12739, "rb": 12740, "dads": 12741, "shanghai": 12742, "money": 12743, "sheets": 12744, "skate": 12745, "threw": 12746, "cupcakes": 12747, "infinite": 12748, "lis": 12749, "practicing": 12750, "essay": 12751, "kai": 12752, "asci": 12753, "mob": 12754, "ugh": 12755, "holmes": 12756, "regg": 
12757, "ikh": 12758, "mock": 12759, "collections": 12760, "pep": 12761, "ova": 12762, "salt": 12763, "nandez": 12764, "coy": 12765, "threats": 12766, "texts": 12767, "cinnam": 12768, "pregnancy": 12769, "pending": 12770, "stamp": 12771, "flower": 12772, "gis": 12773, "agreed": 12774, "payne": 12775, "rover": 12776, "phra": 12777, "soft": 12778, "ffin": 12779, "fathers": 12780, "passengers": 12781, "aways": 12782, "ala": 12783, "hes": 12784, "livan": 12785, "ins": 12786, "samuel": 12787, "ingui": 12788, "hof": 12789, "jj": 12790, "chennai": 12791, "catal": 12792, "omic": 12793, "heath": 12794, "niece": 12795, "pumped": 12796, "integrated": 12797, "arel": 12798, "nom": 12799, "productivity": 12800, "wanting": 12801, "visa": 12802, "diana": 12803, "twil": 12804, "itv": 12805, "camps": 12806, "rowing": 12807, "dley": 12808, "blackand": 12809, "guards": 12810, "bells": 12811, "reverse": 12812, "vibe": 12813, "ricky": 12814, "moss": 12815, "nyt": 12816, "âĺĢï¸ı": 12817, "elle": 12818, "troy": 12819, "cudd": 12820, "evan": 12821, "womens": 12822, "foto": 12823, "mistakes": 12824, "wicked": 12825, "mil": 12826, "cled": 12827, "memes": 12828, "cosmo": 12829, "scholar": 12830, "reno": 12831, "ðŁĺĢ": 12832, "vents": 12833, "#â̦": 12834, "terrorists": 12835, "casey": 12836, "cardinals": 12837, "ðŁĺĬðŁĺĬ": 12838, "venezuela": 12839, "bola": 12840, "literacy": 12841, "tw": 12842, "eno": 12843, "contains": 12844, "austin": 12845, "financi": 12846, "evan": 12847, "harvard": 12848, "originally": 12849, "chevro": 12850, "herald": 12851, "nottingham": 12852, "managers": 12853, "âŀ”": 12854, "accepting": 12855, "walsh": 12856, "tutorial": 12857, "entrepreneurship": 12858, "yacht": 12859, "requirements": 12860, "glenn": 12861, "pede": 12862, "unfortunately": 12863, "aching": 12864, "daisy": 12865, "gian": 12866, "nightmare": 12867, "âĿĹ": 12868, "rina": 12869, "bart": 12870, "emails": 12871, "opposite": 12872, "whom": 12873, "sake": 12874, "puzzle": 12875, "dashi": 12876, "party": 
12877, "blanket": 12878, "buses": 12879, "lore": 12880, "beauty": 12881, "reason": 12882, "punjab": 12883, "windsor": 12884, "functional": 12885, "existing": 12886, "hello": 12887, "glimp": 12888, "convin": 12889, "lak": 12890, "screaming": 12891, "rebecca": 12892, "bliss": 12893, "northwest": 12894, "infinity": 12895, "cosmetics": 12896, "pulling": 12897, "coffee": 12898, "pling": 12899, "opho": 12900, "colombia": 12901, "interiordesign": 12902, "(+": 12903, "emotions": 12904, "sac": 12905, "sunglasses": 12906, "saves": 12907, "df": 12908, "sixth": 12909, "aly": 12910, "ðŁĺ»": 12911, "deen": 12912, "devast": 12913, "politicians": 12914, "lacrosse": 12915, "gu": 12916, "pei": 12917, "java": 12918, "combine": 12919, "coalition": 12920, "erts": 12921, "surviv": 12922, "chad": 12923, "strian": 12924, "nn": 12925, "devi": 12926, "counc": 12927, "concern": 12928, "controller": 12929, "breast": 12930, "jury": 12931, "tum": 12932, "introduces": 12933, "ladi": 12934, "mobile": 12935, "alz": 12936, "steady": 12937, "nurses": 12938, "hacking": 12939, "online": 12940, "ocean": 12941, "ðŁİĦ": 12942, "aam": 12943, "juven": 12944, "icc": 12945, "louisiana": 12946, "arte": 12947, "streetart": 12948, "ison": 12949, "wns": 12950, "frm": 12951, "panda": 12952, "noir": 12953, "maintain": 12954, "delay": 12955, "symptoms": 12956, "thorn": 12957, "geome": 12958, "tern": 12959, "carried": 12960, "pru": 12961, "panor": 12962, "assy": 12963, "peru": 12964, "cloud": 12965, "spra": 12966, "pedi": 12967, "este": 12968, "tagged": 12969, "ðŁĺĿ": 12970, "shadows": 12971, "nazi": 12972, "Ć˜Ā§Ć™Ä¦": 12973, "corri": 12974, "âĻ„âĻ„": 12975, "jad": 12976, "ðŁĩ«": 12977, "formal": 12978, "spoken": 12979, "ðŁĮŀ": 12980, "enjoy": 12981, "lopez": 12982, "outlook": 12983, "inho": 12984, "wander": 12985, "ƙħ": 12986, "maya": 12987, "pee": 12988, "dine": 12989, "ãĢij": 12990, "briefing": 12991, "supporter": 12992, "arily": 12993, "ghters": 12994, "naturally": 12995, "doctorwho": 12996, "jen": 12997, "var": 
12998, "newyear": 12999, "rese": 13000, "simm": 13001, "rex": 13002, "consequ": 13003, "tomatoes": 13004, "burst": 13005, "bravo": 13006, "burgers": 13007, "cracking": 13008, "northeast": 13009, "biom": 13010, "mushroom": 13011, "marque": 13012, "double": 13013, "nier": 13014, "vag": 13015, "twenty": 13016, "keyboard": 13017, "winni": 13018, "jamaica": 13019, "parish": 13020, ":-": 13021, "mentalhealth": 13022, "alizing": 13023, "render": 13024, "waking": 13025, "ðŁİĤ": 13026, "gly": 13027, "nathan": 13028, "washing": 13029, "melissa": 13030, "jung": 13031, "loyal": 13032, "chili": 13033, "songwriter": 13034, "guitarist": 13035, "bowie": 13036, "neighbors": 13037, "onymous": 13038, "asset": 13039, "tai": 13040, "headquarters": 13041, "ðŁĮĪ": 13042, "ihear": 13043, "cigare": 13044, "surg": 13045, ")\"": 13046, "repl": 13047, "darling": 13048, "ðŁĻĦ": 13049, "zak": 13050, "sare": 13051, "ãħĭ": 13052, "mickey": 13053, "warehouse": 13054, "massage": 13055, "inees": 13056, "didnt": 13057, "iw": 13058, "hurts": 13059, "engaging": 13060, "magic": 13061, "womenin": 13062, "kitten": 13063, "mors": 13064, "cart": 13065, "titans": 13066, "colleague": 13067, "competing": 13068, "eran": 13069, "khal": 13070, "marble": 13071, "demand": 13072, "delight": 13073, "etary": 13074, "blizz": 13075, "louise": 13076, "mls": 13077, "finishes": 13078, "experiment": 13079, "conducted": 13080, "electronics": 13081, "itters": 13082, "caring": 13083, "whats": 13084, "symbol": 13085, "jung": 13086, "ecu": 13087, "pix": 13088, "context": 13089, "charger": 13090, "ðŁĺĩ": 13091, "reig": 13092, "frag": 13093, "ƫĭ": 13094, "chad": 13095, "true": 13096, "kerry": 13097, "defending": 13098, "aint": 13099, "auton": 13100, "checkout": 13101, "barnes": 13102, "lessly": 13103, "dt": 13104, "mme": 13105, "cloudy": 13106, "secondary": 13107, "arez": 13108, "_:": 13109, "appa": 13110, "constant": 13111, "\")": 13112, "vets": 13113, "job": 13114, "ient": 13115, "ðŁĺŃðŁĺŃðŁĺŃ": 13116, "mj": 13117, "french": 
13118, "diver": 13119, "davies": 13120, "hhhh": 13121, "ebook": 13122, "à¹ī": 13123, "mariti": 13124, "breeze": 13125, "suspended": 13126, "mato": 13127, "viet": 13128, "rahu": 13129, "sei": 13130, "bolt": 13131, "enary": 13132, "leis": 13133, "karl": 13134, "framed": 13135, "explaining": 13136, "abc": 13137, "dealing": 13138, "nato": 13139, "jake": 13140, "expand": 13141, "leonard": 13142, "established": 13143, "dub": 13144, "armen": 13145, "elled": 13146, "vocal": 13147, "nicholas": 13148, "orient": 13149, "kyo": 13150, "illustrated": 13151, "ahh": 13152, "dancers": 13153, "million": 13154, "geta": 13155, "popp": 13156, "asu": 13157, "murdered": 13158, "gible": 13159, "stoked": 13160, "griffin": 13161, "maximum": 13162, "adrian": 13163, "encounter": 13164, "thero": 13165, "davidson": 13166, "ðŁį»": 13167, "holiday": 13168, "evo": 13169, "assets": 13170, "carson": 13171, "memorable": 13172, "âļ½": 13173, "obam": 13174, "representative": 13175, "cbd": 13176, "tricks": 13177, "vogue": 13178, "voice": 13179, "mmmm": 13180, "sebastian": 13181, "clif": 13182, "athy": 13183, "paralle": 13184, "ðŁ¤·": 13185, "pak": 13186, "evacu": 13187, "eats": 13188, "اØ": 13189, "touched": 13190, "organised": 13191, "spirits": 13192, "canad": 13193, "guided": 13194, "framework": 13195, "ðŁĮŁ": 13196, "ped": 13197, "natural": 13198, "agar": 13199, "replaced": 13200, "anchor": 13201, "tit": 13202, "shah": 13203, "organis": 13204, "superior": 13205, "rn": 13206, "chro": 13207, "erica": 13208, "still": 13209, "coron": 13210, "chuck": 13211, "locks": 13212, "organ": 13213, "rosen": 13214, "scam": 13215, "bened": 13216, "/#": 13217, "keen": 13218, "trevor": 13219, "vampire": 13220, "sorted": 13221, "!'": 13222, "afford": 13223, "intro": 13224, "grace": 13225, "ðŁĺľ": 13226, "saur": 13227, "kickstarter": 13228, "influen": 13229, "vu": 13230, "yup": 13231, "poc": 13232, "ðŁİ„": 13233, "aar": 13234, "sang": 13235, "trek": 13236, "etsy": 13237, "tbh": 13238, "scream": 13239, "chevrolet": 13240, 
"pixel": 13241, "shepherd": 13242, "anor": 13243, "gabriel": 13244, "twood": 13245, "sdcc": 13246, "meters": 13247, "developers": 13248, "closure": 13249, "vw": 13250, "twitch": 13251, "ìĹ": 13252, "seoul": 13253, "price": 13254, "hog": 13255, "nish": 13256, "hillary": 13257, "scratch": 13258, "incen": 13259, "wagon": 13260, "disability": 13261, "panther": 13262, "chats": 13263, "gd": 13264, "witz": 13265, "sussex": 13266, "late": 13267, "denmark": 13268, "gerald": 13269, "cancelled": 13270, "nette": 13271, "ix": 13272, "naval": 13273, "baptist": 13274, "tet": 13275, "yad": 13276, "math": 13277, "hoy": 13278, "randy": 13279, "point": 13280, "intellec": 13281, "fruits": 13282, "wool": 13283, "guin": 13284, "pron": 13285, "theft": 13286, "condem": 13287, "marry": 13288, "nola": 13289, "architects": 13290, "cincin": 13291, "rockets": 13292, "gentleman": 13293, "explan": 13294, "tate": 13295, "doe": 13296, "raises": 13297, "wildlife": 13298, "wl": 13299, "insider": 13300, "blanc": 13301, "wp": 13302, "forsale": 13303, "nyc": 13304, "powell": 13305, "unbelievable": 13306, "pens": 13307, "goodies": 13308, "mustang": 13309, "pens": 13310, "stays": 13311, "squash": 13312, "xoxo": 13313, "nearby": 13314, "everton": 13315, "coco": 13316, "leagu": 13317, "khan": 13318, "stud": 13319, "southwest": 13320, "construc": 13321, "sworth": 13322, "croatia": 13323, "lea": 13324, "sums": 13325, "aims": 13326, "ean": 13327, "vaness": 13328, "itious": 13329, "pathy": 13330, "arcade": 13331, "bend": 13332, "suggests": 13333, "sacram": 13334, "royals": 13335, "rier": 13336, "emir": 13337, "incl": 13338, "ank": 13339, "clark": 13340, "right": 13341, "vacc": 13342, "ा": 13343, "tane": 13344, "lib": 13345, "usc": 13346, "sales": 13347, "huh": 13348, "sally": 13349, "vera": 13350, "pga": 13351, "grows": 13352, "drum": 13353, "tree": 13354, "ethics": 13355, "suggest": 13356, "isab": 13357, "sealed": 13358, "previously": 13359, "animated": 13360, "abdu": 13361, "rises": 13362, "glob": 13363, 
"predat": 13364, "scarf": 13365, "delic": 13366, "omar": 13367, "lli": 13368, "sxsw": 13369, "python": 13370, "nebra": 13371, "funk": 13372, "reflect": 13373, "pavilion": 13374, "tically": 13375, "chasing": 13376, "bakery": 13377, "invasion": 13378, "koh": 13379, "believed": 13380, "cohen": 13381, "conqu": 13382, "crafts": 13383, "nati": 13384, "clever": 13385, "governance": 13386, "samples": 13387, "fails": 13388, "âĶ": 13389, "timo": 13390, "ritu": 13391, "striking": 13392, "inclusive": 13393, "shocking": 13394, "cant": 13395, "requires": 13396, "drawings": 13397, "à¸Ń": 13398, "purchased": 13399, "dum": 13400, "zach": 13401, "warner": 13402, "console": 13403, "mansion": 13404, "fountain": 13405, "circum": 13406, "esh": 13407, "island": 13408, "milk": 13409, "profits": 13410, "halifax": 13411, "rival": 13412, "âľĪï¸ı": 13413, "jenny": 13414, "sandra": 13415, "nye": 13416, "kelly": 13417, "yal": 13418, "quad": 13419, "nos": 13420, "instein": 13421, "finalists": 13422, "midfielder": 13423, "cue": 13424, "exceptional": 13425, "aan": 13426, "sapp": 13427, "gettin": 13428, "saa": 13429, "fati": 13430, "slice": 13431, "volk": 13432, "swal": 13433, "lasting": 13434, "summary": 13435, "itas": 13436, "smo": 13437, "sz": 13438, "âĺĨ": 13439, "ipl": 13440, "flames": 13441, "enews": 13442, "hav": 13443, "hoodie": 13444, "pitcher": 13445, "windy": 13446, "revol": 13447, "central": 13448, "tonite": 13449, "ðŁİīðŁİī": 13450, "solved": 13451, "milwau": 13452, "organizations": 13453, "weets": 13454, "refin": 13455, "sth": 13456, "ãĄ¼": 13457, "elin": 13458, "tona": 13459, "cinnamon": 13460, "ðŁİ¨": 13461, "ðŁİģ": 13462, "ronaldo": 13463, "peninsu": 13464, "omega": 13465, "elds": 13466, "designing": 13467, "eigh": 13468, "bluet": 13469, "benz": 13470, "nug": 13471, "asha": 13472, "robots": 13473, "sudan": 13474, "choosing": 13475, "endo": 13476, "serge": 13477, "closely": 13478, "handy": 13479, "finger": 13480, "being": 13481, "arte": 13482, "survived": 13483, "flame": 13484, 
"milestone": 13485, "gut": 13486, "dwar": 13487, "futures": 13488, "ée": 13489, "elo": 13490, "fridge": 13491, "elic": 13492, "ouch": 13493, "ub": 13494, "pv": 13495, "titan": 13496, "collar": 13497, "station": 13498, "nevada": 13499, "aurora": 13500, "rd": 13501, "duncan": 13502, "âģł": 13503, "brien": 13504, "marsh": 13505, "о": 13506, "total": 13507, "chry": 13508, "sers": 13509, "suffe": 13510, "rachel": 13511, "college": 13512, "todays": 13513, "courts": 13514, "chit": 13515, "reunited": 13516, "gymna": 13517, "genesis": 13518, "beside": 13519, "representation": 13520, "chant": 13521, "collector": 13522, "rak": 13523, "athens": 13524, "nigh": 13525, "munich": 13526, "languages": 13527, "flu": 13528, "participation": 13529, "___": 13530, "cv": 13531, "spectrum": 13532, "soda": 13533, "cover": 13534, "referen": 13535, "abbo": 13536, "apa": 13537, "publication": 13538, "edm": 13539, "monica": 13540, "army": 13541, "ðŁļĢ": 13542, "divor": 13543, "dry": 13544, "streams": 13545, "robotics": 13546, "cider": 13547, "bullying": 13548, "approval": 13549, "stoke": 13550, "platforms": 13551, "sierra": 13552, "extin": 13553, "ib": 13554, "hayes": 13555, "succeed": 13556, "suffer": 13557, "atically": 13558, "dai": 13559, "lynch": 13560, "hound": 13561, "delines": 13562, "acknow": 13563, "dated": 13564, "exclusively": 13565, "heres": 13566, "facilit": 13567, "damaged": 13568, "charter": 13569, "lakers": 13570, "falcon": 13571, "unveiled": 13572, "welove": 13573, "ease": 13574, "patience": 13575, "lone": 13576, "gentle": 13577, "genetic": 13578, "producing": 13579, "gour": 13580, "shannon": 13581, "bilities": 13582, "zimbabwe": 13583, "pint": 13584, "daughters": 13585, "literary": 13586, "belle": 13587, "clam": 13588, "surrounded": 13589, "kany": 13590, "neil": 13591, "pirate": 13592, "ranger": 13593, "hbd": 13594, "natalie": 13595, "belong": 13596, "olympi": 13597, "embassy": 13598, "scol": 13599, "ener": 13600, "akin": 13601, "loren": 13602, "bh": 13603, ":/": 13604, 
"diva": 13605, "denim": 13606, "hipp": 13607, "ðŁĩµðŁĩ": 13608, "arnold": 13609, "?'": 13610, "weren": 13611, "empower": 13612, "disabled": 13613, "manor": 13614, "raspberry": 13615, "baf": 13616, "awful": 13617, "drummer": 13618, "kardashi": 13619, "nash": 13620, "machinelearning": 13621, "chu": 13622, "rebels": 13623, "timing": 13624, "monroe": 13625, "tongue": 13626, "range": 13627, "pupils": 13628, "ress": 13629, "amazon": 13630, "bz": 13631, "harley": 13632, "palmer": 13633, "balloon": 13634, "sings": 13635, "icec": 13636, "jb": 13637, "cers": 13638, "gps": 13639, "whist": 13640, "rise": 13641, "lt": 13642, "oooo": 13643, "cattle": 13644, "shooter": 13645, "vodka": 13646, "ucl": 13647, "mtg": 13648, "lesli": 13649, "jonas": 13650, "dispo": 13651, "atric": 13652, "stein": 13653, "vintage": 13654, "firms": 13655, "floyd": 13656, "cowboy": 13657, "soooo": 13658, "isaac": 13659, "warcraft": 13660, "disneyland": 13661, "beautiful": 13662, "beam": 13663, "franchise": 13664, "bun": 13665, "kag": 13666, "anon": 13667, "turbo": 13668, "sweep": 13669, "madein": 13670, "karachi": 13671, "detective": 13672, "pennsylvania": 13673, "controversi": 13674, "vitamin": 13675, "aside": 13676, "chronic": 13677, "describes": 13678, "removal": 13679, "hah": 13680, "aper": 13681, "tened": 13682, "uto": 13683, "badly": 13684, "mirac": 13685, "fry": 13686, "yea": 13687, "injec": 13688, "thermal": 13689, "compact": 13690, "thor": 13691, "teed": 13692, "urgent": 13693, "lite": 13694, "gilli": 13695, "sophom": 13696, "ico": 13697, "chem": 13698, "pm": 13699, "fork": 13700, "freak": 13701, "chak": 13702, "recipient": 13703, "iy": 13704, "nik": 13705, "modeling": 13706, "cans": 13707, "ðŁıĢ": 13708, "delux": 13709, "seam": 13710, "survivors": 13711, "radical": 13712, "investigating": 13713, "reliable": 13714, "fm": 13715, "turt": 13716, "lighthouse": 13717, "tool": 13718, "gown": 13719, "))": 13720, "bots": 13721, "autograph": 13722, "aid": 13723, "buffe": 13724, "hmm": 13725, "horrible": 
13726, "ssional": 13727, "anni": 13728, "à¹Ģ": 13729, "kits": 13730, "schi": 13731, "eternal": 13732, "huss": 13733, "sensitive": 13734, "ru": 13735, "tastes": 13736, "checks": 13737, "imo": 13738, "portion": 13739, "skate": 13740, "eden": 13741, "halftime": 13742, "fried": 13743, "rihanna": 13744, "tise": 13745, "flick": 13746, "cain": 13747, "sgt": 13748, "âľĶ": 13749, "shau": 13750, "stained": 13751, "raffle": 13752, "drove": 13753, "salman": 13754, "principles": 13755, "sho": 13756, "aru": 13757, "jess": 13758, "guine": 13759, "garbage": 13760, "myan": 13761, "jelly": 13762, "disru": 13763, "zia": 13764, "qld": 13765, "entries": 13766, "lav": 13767, "flew": 13768, "admit": 13769, "objects": 13770, "compare": 13771, "nytimes": 13772, "cannes": 13773, "pn": 13774, "suffol": 13775, "roc": 13776, "dana": 13777, "egg": 13778, "hist": 13779, "counsel": 13780, "'!": 13781, "physi": 13782, "imagination": 13783, "adjust": 13784, "explosion": 13785, "plymouth": 13786, "horror": 13787, "elliott": 13788, "bourne": 13789, "dex": 13790, "breed": 13791, "audio": 13792, "lobster": 13793, "disappointed": 13794, "nationwide": 13795, "((": 13796, "increases": 13797, "australi": 13798, "cedar": 13799, "staring": 13800, "racial": 13801, "eis": 13802, "gmt": 13803, "visions": 13804, "stayed": 13805, "discussions": 13806, "dean": 13807, "curtis": 13808, "maiden": 13809, "stellar": 13810, "happiest": 13811, "hwy": 13812, "preseason": 13813, "carav": 13814, "mondays": 13815, "hospitals": 13816, "glimpse": 13817, "scholars": 13818, "jai": 13819, "terrace": 13820, "anna": 13821, "goose": 13822, "graded": 13823, "lotus": 13824, "hung": 13825, "grocery": 13826, "stamps": 13827, "emperor": 13828, "scoop": 13829, "inser": 13830, "cas": 13831, "existence": 13832, "heal": 13833, "falcons": 13834, "marvel": 13835, "reducing": 13836, "terrific": 13837, "magnetic": 13838, "performs": 13839, "barre": 13840, "pus": 13841, "treating": 13842, "icon": 13843, "wh": 13844, "declared": 13845, "trauma": 
13846, "dod": 13847, "comedian": 13848, "nikon": 13849, "bugs": 13850, "asm": 13851, "montgom": 13852, "ibiza": 13853, "comprehensive": 13854, "has": 13855, "santi": 13856, "fellowship": 13857, "dash": 13858, "psal": 13859, "louisville": 13860, "spy": 13861, "fault": 13862, "dthe": 13863, "filed": 13864, "vista": 13865, "desc": 13866, "fears": 13867, "youtu": 13868, "sps": 13869, "esp": 13870, "rig": 13871, "crime": 13872, "berger": 13873, "wonderland": 13874, "kent": 13875, "informed": 13876, "stevens": 13877, "myth": 13878, "aston": 13879, "iri": 13880, "visitor": 13881, "atri": 13882, "producers": 13883, "alla": 13884, "personally": 13885, "separate": 13886, "agencies": 13887, "afri": 13888, "ilan": 13889, "spoke": 13890, "nina": 13891, "squad": 13892, "dives": 13893, "depend": 13894, "liv": 13895, "fierce": 13896, "entertaining": 13897, "chain": 13898, "scat": 13899, "borders": 13900, "palette": 13901, "spro": 13902, "osis": 13903, "derby": 13904, "tobacco": 13905, "zio": 13906, "willie": 13907, "juvent": 13908, "zoom": 13909, "holy": 13910, "entirely": 13911, "afe": 13912, "martinez": 13913, "beds": 13914, "pea": 13915, "bulldogs": 13916, "ðŁĩªðŁĩ": 13917, "ibm": 13918, "neon": 13919, "ethiopia": 13920, "teammates": 13921, "planting": 13922, "twer": 13923, "anytime": 13924, "forbes": 13925, "ón": 13926, "runway": 13927, "nervous": 13928, "roger": 13929, "pile": 13930, "chanc": 13931, "apocaly": 13932, "uw": 13933, "oi": 13934, "drought": 13935, "territory": 13936, "brick": 13937, "creatures": 13938, "goin": 13939, "waff": 13940, "gren": 13941, "southeast": 13942, "jean": 13943, "ambul": 13944, "edited": 13945, "strap": 13946, "cv": 13947, "aaron": 13948, "ãĄ»ãĄ»": 13949, "tsu": 13950, "description": 13951, "kindly": 13952, "clutch": 13953, "immer": 13954, "enor": 13955, "womensday": 13956, "orange": 13957, "rag": 13958, "obvious": 13959, "hyder": 13960, "channels": 13961, "mango": 13962, "meyer": 13963, "raining": 13964, "getty": 13965, "pilgri": 13966, 
"coordinator": 13967, "upload": 13968, "nintendo": 13969, "donuts": 13970, "sanchez": 13971, "apparel": 13972, "jr": 13973, "zzi": 13974, ",@": 13975, "jefferson": 13976, "accessible": 13977, "greatly": 13978, "eid": 13979, "initial": 13980, "buddha": 13981, "paris": 13982, "mascot": 13983, "â¬ĩï¸ı": 13984, "schwar": 13985, "siri": 13986, "spinning": 13987, "mortgage": 13988, "echo": 13989, "endange": 13990, "gedly": 13991, "chloe": 13992, "enhance": 13993, "karnat": 13994, "kry": 13995, "explores": 13996, "ðŁēģ": 13997, "affair": 13998, "icals": 13999, "alla": 14000, "dart": 14001, "dolphins": 14002, "differences": 14003, "squirrel": 14004, "augh": 14005, "drones": 14006, "ellen": 14007, "restore": 14008, "paw": 14009, "unfor": 14010, "pike": 14011, "hilton": 14012, "collab": 14013, "consumers": 14014, "coinci": 14015, "outcomes": 14016, "ppp": 14017, "aq": 14018, "coupon": 14019, "liest": 14020, "sims": 14021, "kho": 14022, "aves": 14023, "spoon": 14024, "pudding": 14025, "corbyn": 14026, "haters": 14027, "exams": 14028, "slave": 14029, ".!": 14030, "psa": 14031, "apples": 14032, "tamil": 14033, "sed": 14034, "coke": 14035, "zzo": 14036, "losange": 14037, "carbon": 14038, "clair": 14039, "...)": 14040, "khu": 14041, "craig": 14042, "exploration": 14043, "sanctuary": 14044, "sue": 14045, "alway": 14046, "dementia": 14047, "wonders": 14048, "superhero": 14049, "pakistani": 14050, "browns": 14051, "bluetooth": 14052, "locker": 14053, "marc": 14054, "eventu": 14055, "deluxe": 14056, "rodriguez": 14057, "âĿ¤âĿ¤": 14058, "robb": 14059, "ðŁē¦": 14060, "linux": 14061, "tens": 14062, "intelligent": 14063, "seed": 14064, "voter": 14065, "sler": 14066, "peaks": 14067, "intern": 14068, "teenage": 14069, "peninsula": 14070, "handling": 14071, "tie": 14072, "cousins": 14073, "wendy": 14074, "mee": 14075, "à¹Ģà¸": 14076, "dino": 14077, "ðŁē°": 14078, "ðŁĺĄ": 14079, "zee": 14080, "sbury": 14081, "tragedy": 14082, "bk": 14083, "bore": 14084, "zin": 14085, "warns": 14086, "idiot": 
14087, "touching": 14088, "continental": 14089, "tacos": 14090, "safari": 14091, "washed": 14092, "podium": 14093, "morrison": 14094, "forests": 14095, "cbc": 14096, "alon": 14097, "particular": 14098, "beads": 14099, "invented": 14100, "loch": 14101, "lighter": 14102, "wherever": 14103, "ide": 14104, "documents": 14105, "awe": 14106, "kr": 14107, "nowhere": 14108, "miner": 14109, "stit": 14110, "rox": 14111, "contribute": 14112, "hardy": 14113, "clan": 14114, "object": 14115, "cait": 14116, "ðŁēķðŁēķ": 14117, "happier": 14118, "vegetables": 14119, "tart": 14120, "gag": 14121, "nominee": 14122, "heavily": 14123, "panic": 14124, "jd": 14125, "theresa": 14126, "atm": 14127, "uph": 14128, "sfc": 14129, "suri": 14130, "drink": 14131, "nal": 14132, "revel": 14133, "kl": 14134, "avocado": 14135, "nomination": 14136, "madonna": 14137, "sharon": 14138, "malcolm": 14139, "controlled": 14140, "shers": 14141, "revival": 14142, "legislation": 14143, "shoots": 14144, "nin": 14145, "commentary": 14146, "pros": 14147, "humanrights": 14148, "stranger": 14149, "mitch": 14150, "pipeline": 14151, "legally": 14152, "thu": 14153, "gilbert": 14154, "toll": 14155, "granted": 14156, "ghs": 14157, "iranian": 14158, "refreshing": 14159, "duk": 14160, "abi": 14161, "prime": 14162, "joseph": 14163, "mosa": 14164, "statistics": 14165, "productions": 14166, "merry": 14167, "patel": 14168, "sax": 14169, "humanitarian": 14170, "structures": 14171, "emissions": 14172, "towns": 14173, "freel": 14174, "stering": 14175, "ratings": 14176, "allegedly": 14177, "cabin": 14178, "stl": 14179, "wade": 14180, "flyers": 14181, "trim": 14182, "promising": 14183, "zu": 14184, "ballot": 14185, "comparison": 14186, "freeze": 14187, "outer": 14188, "greatness": 14189, "assign": 14190, "snowy": 14191, "rale": 14192, "tories": 14193, "mediter": 14194, "knock": 14195, "consultant": 14196, "cincinnati": 14197, "analyst": 14198, "scoo": 14199, "jews": 14200, "approxim": 14201, "pure": 14202, "portraits": 14203, 
"cyrus": 14204, "ational": 14205, "loans": 14206, "acquis": 14207, "elu": 14208, "acceptable": 14209, "union": 14210, "watercolor": 14211, "rust": 14212, "battles": 14213, "perfu": 14214, "seasonal": 14215, "serial": 14216, "mindset": 14217, "riot": 14218, "feld": 14219, "ennial": 14220, "closet": 14221, "priest": 14222, "tanks": 14223, "intl": 14224, "screw": 14225, "bum": 14226, "abdul": 14227, "oux": 14228, "explained": 14229, "rica": 14230, "imaging": 14231, "lawyers": 14232, "buried": 14233, "ãĄ»ãĄ»ãĄ»": 14234, "earl": 14235, "âĢķ": 14236, "lton": 14237, "restored": 14238, "stripes": 14239, "foss": 14240, "demands": 14241, "stealing": 14242, "alexis": 14243, "mund": 14244, "aker": 14245, "urus": 14246, "wardro": 14247, "hugs": 14248, "genre": 14249, "ego": 14250, "ƙĦ": 14251, "participated": 14252, "babes": 14253, "banquet": 14254, "tious": 14255, "hemi": 14256, "dsb": 14257, "lost": 14258, "milwaukee": 14259, "jenner": 14260, "gem": 14261, "outra": 14262, "loses": 14263, "idi": 14264, "reps": 14265, "ðŁİ§": 14266, "regulation": 14267, "flaw": 14268, "fang": 14269, "vibrant": 14270, "ramp": 14271, "rains": 14272, "wellbeing": 14273, "soviet": 14274, "viewers": 14275, "depo": 14276, "libraries": 14277, "bigo": 14278, "sery": 14279, "gill": 14280, "destruction": 14281, "coz": 14282, "cx": 14283, "bridal": 14284, "alds": 14285, "planted": 14286, "amateur": 14287, "lud": 14288, "cheering": 14289, "showcas": 14290, "profile": 14291, "iu": 14292, "vertical": 14293, "packers": 14294, "wizard": 14295, "skip": 14296, "slight": 14297, "beau": 14298, "airways": 14299, "much": 14300, "rera": 14301, "ðŁĮĬ": 14302, "absor": 14303, "patio": 14304, "packages": 14305, "sells": 14306, "mentally": 14307, "ðŁĺ¢": 14308, "reynolds": 14309, "kare": 14310, "tribun": 14311, "walt": 14312, "knit": 14313, "taste": 14314, "surrey": 14315, "bounce": 14316, "creature": 14317, "bare": 14318, "betting": 14319, "sure": 14320, "miley": 14321, "laughs": 14322, "alore": 14323, "cyn": 14324, 
"tl": 14325, "artist": 14326, "annah": 14327, "warmer": 14328, "dynamics": 14329, "lunchtime": 14330, "maritime": 14331, "vulnerable": 14332, "ðŁēĄ": 14333, "wolver": 14334, "durham": 14335, "constantly": 14336, "amin": 14337, "sibl": 14338, ":@": 14339, "bullet": 14340, "kach": 14341, "angelo": 14342, "wilder": 14343, "doom": 14344, "desktop": 14345, "lawsuit": 14346, "kca": 14347, "henderson": 14348, "inviting": 14349, "betty": 14350, "tawards": 14351, "rafa": 14352, "leaked": 14353, "andi": 14354, "gems": 14355, "afl": 14356, "velo": 14357, "mediterran": 14358, "probe": 14359, "totten": 14360, "stephanie": 14361, "snation": 14362, "combe": 14363, "qs": 14364, "overcome": 14365, "assassin": 14366, "rav": 14367, "filip": 14368, "winnipeg": 14369, "shil": 14370, "determined": 14371, "kas": 14372, "outre": 14373, "regret": 14374, "guides": 14375, "aaa": 14376, "ðŁĺĪ": 14377, "wives": 14378, "manife": 14379, "erly": 14380, "smy": 14381, "shima": 14382, "xing": 14383, "pixel": 14384, "jacob": 14385, "accommod": 14386, "toy": 14387, "ono": 14388, "poo": 14389, "tier": 14390, "answe": 14391, "ðŁēģ": 14392, "rosa": 14393, "lease": 14394, "belongs": 14395, "thar": 14396, "eventually": 14397, "neither": 14398, "goa": 14399, "skiing": 14400, "atra": 14401, "agh": 14402, "broadcasting": 14403, "fury": 14404, "pyram": 14405, "dice": 14406, "volkswag": 14407, "womens": 14408, "provider": 14409, "bombs": 14410, "missile": 14411, "whip": 14412, "dick": 14413, "norwe": 14414, "backup": 14415, "elder": 14416, "mature": 14417, "concerts": 14418, "gious": 14419, "squee": 14420, "goodmorning": 14421, "braves": 14422, "^_": 14423, "aussie": 14424, "luna": 14425, "males": 14426, "heck": 14427, "fortn": 14428, "romeo": 14429, "steelers": 14430, "pn": 14431, "peer": 14432, "represents": 14433, "«": 14434, "katy": 14435, "miguel": 14436, "require": 14437, "chains": 14438, "lur": 14439, "immediate": 14440, "timber": 14441, "âĸ¶ï¸ı": 14442, "advocacy": 14443, "export": 14444, "anz": 14445, 
"tiffany": 14446, "author": 14447, "ðŁİĪ": 14448, "dudes": 14449, "chilly": 14450, "hid": 14451, "harm": 14452, "bug": 14453, "monster": 14454, "terrier": 14455, "tuc": 14456, "storytelling": 14457, "tak": 14458, "inti": 14459, "immigrants": 14460, "bis": 14461, "reaches": 14462, "compassion": 14463, "johnny": 14464, "contributions": 14465, "ðŁIJ¶": 14466, "mechanical": 14467, "impression": 14468, "ranks": 14469, "kobe": 14470, "menting": 14471, "blossom": 14472, "pablo": 14473, "builder": 14474, "bombing": 14475, "twel": 14476, "sullivan": 14477, "omo": 14478, "pete": 14479, "demi": 14480, "kudos": 14481, "wbb": 14482, "tgif": 14483, "massach": 14484, "neighbor": 14485, "chefs": 14486, "engines": 14487, "pune": 14488, "gained": 14489, "phantom": 14490, "sdays": 14491, "extend": 14492, "gran": 14493, "centers": 14494, "jacqu": 14495, "datasci": 14496, "sleepy": 14497, "elvis": 14498, "answered": 14499, "slot": 14500, "cony": 14501, "flexible": 14502, "tially": 14503, "letics": 14504, "%,": 14505, "andrews": 14506, "sible": 14507, "momma": 14508, "vino": 14509, "dox": 14510, "invitational": 14511, "twilight": 14512, "jade": 14513, "illery": 14514, "johns": 14515, "fou": 14516, "pv": 14517, "--->": 14518, "breakdown": 14519, "billion": 14520, "printer": 14521, "mond": 14522, "cbc": 14523, "maggie": 14524, "legion": 14525, "dub": 14526, "kurt": 14527, "poor": 14528, "parenting": 14529, "regions": 14530, "bikini": 14531, "beware": 14532, "sional": 14533, "auburn": 14534, "kidding": 14535, "amples": 14536, "span": 14537, "contempor": 14538, "cic": 14539, "habits": 14540, "ako": 14541, "prefe": 14542, "buddies": 14543, "itz": 14544, "emily": 14545, "personnel": 14546, "mountain": 14547, "versus": 14548, "ðŁĺ¬": 14549, "earning": 14550, "sink": 14551, "dari": 14552, "uu": 14553, "swin": 14554, "ister": 14555, "brutal": 14556, "nac": 14557, "kata": 14558, "cloth": 14559, "amand": 14560, "ðŁĶĹ": 14561, "neo": 14562, "alumin": 14563, "weekends": 14564, "nebraska": 14565, 
"codes": 14566, "delayed": 14567, "bruno": 14568, "proven": 14569, "inc": 14570, "ight": 14571, "flan": 14572, "oro": 14573, "lambert": 14574, "regulat": 14575, "wf": 14576, "massachuse": 14577, "kardashian": 14578, "bernard": 14579, "fiesta": 14580, "volcano": 14581, "grandpa": 14582, "anca": 14583, "dre": 14584, "stitu": 14585, "meaning": 14586, "foam": 14587, "auck": 14588, "ated": 14589, "rl": 14590, "hotel": 14591, "persons": 14592, "dynasty": 14593, "ellor": 14594, "mai": 14595, "amne": 14596, "styling": 14597, "avier": 14598, "eg": 14599, "vegetarian": 14600, ",â̦": 14601, "founders": 14602, "stain": 14603, "gd": 14604, "cycles": 14605, "skyline": 14606, "tractor": 14607, "exists": 14608, "tral": 14609, "kidney": 14610, "maril": 14611, "instag": 14612, "sette": 14613, "addict": 14614, "triangle": 14615, "flashback": 14616, "controversial": 14617, "zon": 14618, "pins": 14619, "ias": 14620, "tray": 14621, "township": 14622, "delegates": 14623, "spam": 14624, "hms": 14625, "crane": 14626, "peoples": 14627, "olo": 14628, "faction": 14629, "butes": 14630, "onica": 14631, "delegation": 14632, "newprofile": 14633, "elier": 14634, "mca": 14635, "wand": 14636, "gely": 14637, "losangeles": 14638, "berke": 14639, "tive": 14640, "disrup": 14641, "zza": 14642, "casa": 14643, "jordan": 14644, "fordshire": 14645, "gathered": 14646, "ichi": 14647, "attendees": 14648, "à¸Ńà¸": 14649, "peppers": 14650, "coin": 14651, "bourbon": 14652, "ernity": 14653, "rotary": 14654, "behaviour": 14655, "jeremy": 14656, "teamwork": 14657, "compliance": 14658, "tremend": 14659, "ðŁĩ§": 14660, "buhari": 14661, "cambo": 14662, "buyers": 14663, "hagen": 14664, "buds": 14665, "bayern": 14666, "monte": 14667, "smells": 14668, "anza": 14669, "athlon": 14670, "described": 14671, "workforce": 14672, "giving": 14673, "api": 14674, "investments": 14675, "dail": 14676, "selena": 14677, "database": 14678, "thum": 14679, "mortal": 14680, "student": 14681, "buyer": 14682, "dover": 14683, "garten": 14684, 
"attle": 14685, "loyalty": 14686, "genoci": 14687, "holocau": 14688, "theaters": 14689, "ruling": 14690, "venus": 14691, "patent": 14692, "chun": 14693, "abby": 14694, "awake": 14695, "massacre": 14696, "bangalore": 14697, "breaking": 14698, "simmons": 14699, "justi": 14700, "hale": 14701, "edchat": 14702, "ggles": 14703, "hawk": 14704, "marking": 14705, "headlines": 14706, "strom": 14707, "cove": 14708, "breathtaking": 14709, "medals": 14710, "haircut": 14711, "christine": 14712, "telegraph": 14713, "gujarat": 14714, "jura": 14715, "cane": 14716, "shore": 14717, "propaganda": 14718, "mueller": 14719, "........": 14720, "savi": 14721, "stomach": 14722, "throws": 14723, "tab": 14724, "warm": 14725, "jong": 14726, "renowned": 14727, "hir": 14728, "rais": 14729, "mushrooms": 14730, "guaranteed": 14731, "boa": 14732, "mj": 14733, "revolutionary": 14734, "certification": 14735, "bruins": 14736, "join": 14737, "wes": 14738, "passport": 14739, "cg": 14740, "sexu": 14741, "capable": 14742, "wv": 14743, "tones": 14744, "jackets": 14745, "accompan": 14746, "spinach": 14747, "forever": 14748, "blair": 14749, "watts": 14750, "gl": 14751, "couples": 14752, "prairie": 14753, "newprofilepic": 14754, "logistics": 14755, "massachusetts": 14756, "jaguar": 14757, "oid": 14758, "weal": 14759, "underwater": 14760, "moz": 14761, "yi": 14762, "maths": 14763, "myanmar": 14764, "preps": 14765, "suffered": 14766, "trace": 14767, "wali": 14768, "ahhh": 14769, "borg": 14770, "stitch": 14771, "culin": 14772, "realise": 14773, "infection": 14774, "discrimination": 14775, "shame": 14776, "ankle": 14777, "humid": 14778, "yt": 14779, "bracket": 14780, "truck": 14781, "triu": 14782, "easter": 14783, "community": 14784, "postcard": 14785, "involving": 14786, "tyler": 14787, "caramel": 14788, "overview": 14789, "examples": 14790, "integrity": 14791, "basement": 14792, "instruments": 14793, "anium": 14794, "atus": 14795, "gher": 14796, "laundry": 14797, "achieve": 14798, "geneva": 14799, "pricing": 
14800, "hyderabad": 14801, "belief": 14802, "meta": 14803, "jaw": 14804, "accounting": 14805, "leader": 14806, "cristiano": 14807, "couture": 14808, "cyp": 14809, "vised": 14810, ",,,": 14811, "knu": 14812, "hick": 14813, "breaker": 14814, "bram": 14815, "rab": 14816, "moor": 14817, "hamas": 14818, "graduating": 14819, "puppies": 14820, "akh": 14821, "tah": 14822, "aches": 14823, "rie": 14824, "opini": 14825, "gta": 14826, "reign": 14827, "tragic": 14828, "rever": 14829, "pill": 14830, "pineapple": 14831, "touches": 14832, "dare": 14833, "leys": 14834, "ilo": 14835, "interiors": 14836, "scouts": 14837, "bart": 14838, "enzie": 14839, "dono": 14840, "brock": 14841, "christians": 14842, "ensemble": 14843, "·": 14844, "cinemas": 14845, "newport": 14846, "airline": 14847, "winston": 14848, "leigh": 14849, "contents": 14850, "prescri": 14851, "urge": 14852, "trout": 14853, "fically": 14854, "ilia": 14855, "subsi": 14856, "arer": 14857, "âļ¾ï¸ı": 14858, "wounded": 14859, "ðŁĻĤ": 14860, "pepper": 14861, "ðŁēŀ": 14862, "fitted": 14863, "aff": 14864, "resur": 14865, "thursdaythoughts": 14866, "zero": 14867, "archaeology": 14868, "div": 14869, "jee": 14870, "ion": 14871, "awaiting": 14872, "cozy": 14873, "beauties": 14874, "bald": 14875, "data": 14876, "grizz": 14877, "stalk": 14878, "kinds": 14879, "cleared": 14880, "jessic": 14881, "regular": 14882, "aliens": 14883, "place": 14884, "bos": 14885, "bizar": 14886, "thisis": 14887, "ðŁēĢ": 14888, "tottenham": 14889, "mafia": 14890, "slam": 14891, "ariana": 14892, "carroll": 14893, "backpack": 14894, "carey": 14895, "univ": 14896, "rg": 14897, "pep": 14898, "digit": 14899, "tattoos": 14900, "agon": 14901, "volunteering": 14902, "differen": 14903, "consumption": 14904, "kathr": 14905, "headphones": 14906, "tshirt": 14907, "ob": 14908, "element": 14909, "retail": 14910, "shru": 14911, "algori": 14912, "container": 14913, "conscious": 14914, "fil": 14915, "coming": 14916, "rash": 14917, "urope": 14918, "define": 14919, "gior": 
14920, "feminist": 14921, "flowing": 14922, "routes": 14923, "glaci": 14924, "fert": 14925, "somerset": 14926, "antes": 14927, "tweeps": 14928, "$$": 14929, "hour": 14930, "endangered": 14931, "yearsof": 14932, "roh": 14933, "popped": 14934, "backing": 14935, "basil": 14936, "brake": 14937, "monaco": 14938, "lgbtq": 14939, "prague": 14940, "utility": 14941, "cassi": 14942, "gateway": 14943, "haunted": 14944, "schul": 14945, "ðŁİµ": 14946, "should": 14947, "walkingdead": 14948, "completing": 14949, "danny": 14950, "montgomery": 14951, "penguin": 14952, "ssi": 14953, "merchandi": 14954, "ðŁijij": 14955, "church": 14956, "hates": 14957, "captain": 14958, "breathing": 14959, "cet": 14960, "fairly": 14961, "approaches": 14962, "companion": 14963, "surprising": 14964, "kanye": 14965, "pey": 14966, "hindi": 14967, "targeted": 14968, "lords": 14969, "deut": 14970, "digging": 14971, "german": 14972, "rut": 14973, "energy": 14974, "closest": 14975, "yun": 14976, "apologi": 14977, "ั": 14978, "sack": 14979, "rup": 14980, "ddy": 14981, "portal": 14982, "dough": 14983, "bats": 14984, "ðŁĵ°": 14985, "atur": 14986, "grapher": 14987, "pires": 14988, "motors": 14989, "ðŁĮ¹": 14990, "jc": 14991, "dang": 14992, "tuk": 14993, "clue": 14994, "usc": 14995, "page": 14996, "dless": 14997, "brows": 14998, "jus": 14999, "ading": 15000, "remarks": 15001, "oom": 15002, "cardio": 15003, "stefan": 15004, "armstrong": 15005, "âĢ¢âĢ¢": 15006, "niest": 15007, "belgian": 15008, "biop": 15009, "soy": 15010, "lof": 15011, "Ć­Ä„": 15012, "qt": 15013, "flashbackfriday": 15014, "cee": 15015, "ģà¸": 15016, "wreck": 15017, "marines": 15018, "amendment": 15019, "wardrobe": 15020, "voy": 15021, "burned": 15022, "guitars": 15023, "rainf": 15024, "lifel": 15025, "ssil": 15026, "ounce": 15027, "external": 15028, "ckey": 15029, "mesh": 15030, "sheikh": 15031, "invitation": 15032, "suggesti": 15033, "popcorn": 15034, "phenomenal": 15035, "anonymous": 15036, "tuna": 15037, "chicago": 15038, "oval": 15039, "dely": 
15040, "locals": 15041, "(&": 15042, "prof": 15043, "novel": 15044, "finder": 15045, "sparks": 15046, "laven": 15047, "infu": 15048, "nicks": 15049, "quant": 15050, "rae": 15051, "exec": 15052, "distingui": 15053, "stances": 15054, "mutual": 15055, "shal": 15056, "unveils": 15057, "edmonton": 15058, "zania": 15059, "adio": 15060, "viewer": 15061, "bradford": 15062, "auditorium": 15063, "quis": 15064, "react": 15065, "http": 15066, "lero": 15067, "cheeky": 15068, "impacts": 15069, "tak": 15070, "edt": 15071, "desperate": 15072, "tay": 15073, "ƬĦ": 15074, "settle": 15075, "bargain": 15076, "resume": 15077, "unite": 15078, "thrown": 15079, "kest": 15080, "seys": 15081, "marching": 15082, "amit": 15083, "decline": 15084, "schar": 15085, "metr": 15086, "stanford": 15087, "linke": 15088, "berra": 15089, "dolls": 15090, "rugby": 15091, "jami": 15092, "bor": 15093, "roadtrip": 15094, "dinosaur": 15095, "mik": 15096, "sunder": 15097, "rem": 15098, "bk": 15099, "overseas": 15100, "naughty": 15101, "implementation": 15102, "iamsrk": 15103, "luncheon": 15104, "firing": 15105, "miami": 15106, "perez": 15107, "thee": 15108, "zon": 15109, "gifted": 15110, "conversion": 15111, "ceramic": 15112, "”ï¸ı": 15113, "pedro": 15114, "ìĨ": 15115, "vick": 15116, "!@": 15117, "heed": 15118, "sid": 15119, "bw": 15120, "document": 15121, "plun": 15122, "grants": 15123, "fantasy": 15124, "predictions": 15125, "valid": 15126, "carved": 15127, "graduated": 15128, "ðŁijįðŁı»": 15129, "nationally": 15130, "chy": 15131, "afl": 15132, "resso": 15133, "blank": 15134, "rivals": 15135, "jig": 15136, "eties": 15137, "omics": 15138, "unemp": 15139, "bound": 15140, "sko": 15141, "inspection": 15142, "paral": 15143, "highs": 15144, "crisp": 15145, "bans": 15146, "oba": 15147, "[@": 15148, "cospla": 15149, "costumes": 15150, "recall": 15151, "mouth": 15152, "nigel": 15153, "bts": 15154, "tera": 15155, "kov": 15156, "docs": 15157, "westminster": 15158, "dict": 15159, "gravity": 15160, "kari": 15161, "rogue": 
15162, "tted": 15163, "wark": 15164, "idaho": 15165, "wend": 15166, "awi": 15167, "queensland": 15168, "processes": 15169, "cliffe": 15170, "mick": 15171, "compens": 15172, "opol": 15173, "they": 15174, "clari": 15175, "wikipedia": 15176, "salmankhan": 15177, "hazard": 15178, "preston": 15179, "sweetest": 15180, "pdf": 15181, "chees": 15182, "trilo": 15183, "southafrica": 15184, "burnt": 15185, "($": 15186, "contain": 15187, "tp": 15188, "submitted": 15189, "soundcloud": 15190, "atu": 15191, "rez": 15192, "wordpress": 15193, "corrupt": 15194, "nf": 15195, "maker": 15196, "Ć­Ä·": 15197, "paras": 15198, "advent": 15199, "rial": 15200, "cafe": 15201, "fossil": 15202, "!!!!!!!": 15203, "cows": 15204, "cj": 15205, "spur": 15206, "institutions": 15207, "landmark": 15208, "entit": 15209, "reut": 15210, "his": 15211, "alzheim": 15212, "wemb": 15213, "reggae": 15214, "mosqu": 15215, "stat": 15216, "identified": 15217, "dealer": 15218, "ream": 15219, "reland": 15220, "tension": 15221, "ðŁĩ©": 15222, "wrapping": 15223, "deeper": 15224, "frat": 15225, "reddit": 15226, "aris": 15227, "morocco": 15228, "..\"": 15229, "blow": 15230, "mapping": 15231, "priorities": 15232, "inga": 15233, "swap": 15234, "rewards": 15235, "conspiracy": 15236, "creative": 15237, "cj": 15238, "congressional": 15239, "vault": 15240, "plex": 15241, "sophomore": 15242, "shadow": 15243, "eless": 15244, "ðŁĺħ": 15245, "darts": 15246, "aldub": 15247, "annoying": 15248, "props": 15249, "nas": 15250, "aluminum": 15251, "hbo": 15252, "offense": 15253, "jill": 15254, "onions": 15255, "laur": 15256, "tae": 15257, "hardest": 15258, "shro": 15259, "gaining": 15260, "measure": 15261, "edtech": 15262, "cyprus": 15263, "tara": 15264, "angeli": 15265, "carlo": 15266, "goon": 15267, "alli": 15268, "implic": 15269, "jupit": 15270, "resilience": 15271, "hail": 15272, "balanced": 15273, ")...": 15274, "joyce": 15275, "gra": 15276, "theli": 15277, "defined": 15278, "shipped": 15279, "mainly": 15280, "mina": 15281, "lm": 
15282, "sacri": 15283, "ober": 15284, "pim": 15285, "claiming": 15286, "enters": 15287, "corey": 15288, "bok": 15289, "cried": 15290, "cooling": 15291, "danielle": 15292, "pharmacy": 15293, "thorough": 15294, "cake": 15295, "klo": 15296, "outreach": 15297, "zens": 15298, "digitalmarketing": 15299, "valent": 15300, "snp": 15301, "herb": 15302, "mrw": 15303, "café": 15304, "captures": 15305, "notre": 15306, "triumph": 15307, "pancakes": 15308, "cumber": 15309, "spike": 15310, "dation": 15311, "bigg": 15312, "sper": 15313, "critical": 15314, "amal": 15315, "tooth": 15316, "founding": 15317, "astro": 15318, "'#": 15319, "quantum": 15320, "thames": 15321, "unc": 15322, "pride": 15323, "airbus": 15324, "knocked": 15325, "undefeated": 15326, "mediterranean": 15327, "calcu": 15328, "clown": 15329, "sensor": 15330, "hammer": 15331, "forgive": 15332, "cushi": 15333, "berry": 15334, "majestic": 15335, "elect": 15336, "politan": 15337, "gta": 15338, "kari": 15339, "burke": 15340, "seahawks": 15341, "volkswagen": 15342, "rei": 15343, "landscapes": 15344, "casu": 15345, "grandfather": 15346, "listened": 15347, "//": 15348, "startrek": 15349, "rainfall": 15350, "furry": 15351, "vier": 15352, "stark": 15353, "rifle": 15354, "ffa": 15355, "leges": 15356, "hillaryclinton": 15357, "minus": 15358, "correctly": 15359, "architectural": 15360, "prece": 15361, "upside": 15362, "boxer": 15363, "ðŁĻĮðŁı¼": 15364, "isai": 15365, "det": 15366, "provo": 15367, "tissue": 15368, "spooky": 15369, "veled": 15370, "recon": 15371, "prospects": 15372, "quebec": 15373, "âļ«": 15374, "igno": 15375, "anatomy": 15376, "shapes": 15377, "wp": 15378, "pinterest": 15379, "hore": 15380, "anes": 15381, "pickup": 15382, "tip": 15383, "pradesh": 15384, "hugh": 15385, "coe": 15386, "pok": 15387, "grammy": 15388, "wellington": 15389, "stigate": 15390, "righ": 15391, "leap": 15392, "kingston": 15393, "scenic": 15394, "gosh": 15395, "vani": 15396, "aug": 15397, "sary": 15398, "zier": 15399, "bureau": 15400, 
"linson": 15401, "conte": 15402, "fragr": 15403, "allan": 15404, "gaw": 15405, "lana": 15406, "collision": 15407, "surveill": 15408, "renais": 15409, "arrange": 15410, "sali": 15411, "doin": 15412, "brance": 15413, "brendan": 15414, "ourse": 15415, "incoming": 15416, "suspension": 15417, "Ć Ā“": 15418, "lla": 15419, "educators": 15420, "intri": 15421, "dae": 15422, "biography": 15423, "bulgar": 15424, "villain": 15425, "gothic": 15426, "rwanda": 15427, "ew": 15428, "mayor": 15429, "meetup": 15430, "democrat": 15431, "morgan": 15432, "sudden": 15433, "tesco": 15434, "carrot": 15435, "bomber": 15436, "mckin": 15437, "rene": 15438, "funday": 15439, "agricultural": 15440, "hahah": 15441, "showtime": 15442, "forming": 15443, "cola": 15444, "scorpi": 15445, "quote": 15446, "poppy": 15447, "slife": 15448, "daz": 15449, "tub": 15450, "nen": 15451, "mot": 15452, "ðŁĺ»": 15453, "sore": 15454, "elderly": 15455, "ove": 15456, "skinny": 15457, "umi": 15458, "anco": 15459, "manship": 15460, "were": 15461, "gv": 15462, "kah": 15463, "folding": 15464, "neat": 15465, "samantha": 15466, "danish": 15467, "ukrain": 15468, "humidity": 15469, "nutri": 15470, "jakarta": 15471, "candles": 15472, "oooooooo": 15473, "atile": 15474, "strength": 15475, "ibra": 15476, "bapti": 15477, "charleston": 15478, "frames": 15479, "girls": 15480, "clearing": 15481, "gluten": 15482, "##": 15483, "supernatural": 15484, "jubi": 15485, "phone": 15486, "hein": 15487, "drun": 15488, "leak": 15489, "investor": 15490, "yer": 15491, "domain": 15492, "ballroom": 15493, "mish": 15494, "appli": 15495, "offshore": 15496, "blaze": 15497, "doro": 15498, "âĺķï¸ı": 15499, "winery": 15500, "sharif": 15501, "adore": 15502, "nir": 15503, "safer": 15504, "sigh": 15505, "ascri": 15506, "strongly": 15507, "tracy": 15508, "cker": 15509, "oll": 15510, "faithful": 15511, "eyed": 15512, "delightful": 15513, "vism": 15514, "karnataka": 15515, "titan": 15516, "whar": 15517, "jerseys": 15518, "refur": 15519, "heaven": 15520, "grip": 
15521, "panama": 15522, "preli": 15523, "gluten": 15524, "odd": 15525, "content": 15526, "ponti": 15527, "tioning": 15528, "ecommerce": 15529, "federation": 15530, "flawless": 15531, "gear": 15532, "tires": 15533, "byr": 15534, "police": 15535, "cuban": 15536, "tributes": 15537, "ticul": 15538, "churches": 15539, "nursery": 15540, "diaries": 15541, "museums": 15542, "snapped": 15543, "ivan": 15544, "wight": 15545, "tourists": 15546, "ramadan": 15547, "trent": 15548, "prophet": 15549, "wondered": 15550, "focusing": 15551, "hid": 15552, "icons": 15553, "iq": 15554, "ambulance": 15555, "pist": 15556, "funniest": 15557, "timeless": 15558, "srilan": 15559, "buys": 15560, "kids": 15561, "colourful": 15562, "ashi": 15563, "chir": 15564, "mum": 15565, "ðŁĵļ": 15566, "letter": 15567, "xen": 15568, "reuters": 15569, "preserve": 15570, "inting": 15571, "step": 15572, "fuji": 15573, "univer": 15574, "iu": 15575, "showdown": 15576, "poems": 15577, "surveillance": 15578, "suspected": 15579, "tae": 15580, "solving": 15581, "tomb": 15582, "mothersday": 15583, "carpen": 15584, "recruit": 15585, "pilots": 15586, "broc": 15587, "mixing": 15588, "fridays": 15589, "tyr": 15590, "representatives": 15591, "trapped": 15592, "abdul": 15593, "freestyle": 15594, "cluster": 15595, "âļłï¸ı": 15596, "kd": 15597, "skill": 15598, "pitt": 15599, "exo": 15600, "commerci": 15601, "museum": 15602, "locally": 15603, "gina": 15604, "nobel": 15605, "immune": 15606, "frac": 15607, "capsu": 15608, "mained": 15609, "attempts": 15610, "bulldog": 15611, "bespoke": 15612, "singers": 15613, "spelling": 15614, "segment": 15615, "natures": 15616, "tick": 15617, "lipstick": 15618, "cleaner": 15619, "gettable": 15620, "precision": 15621, "â̼ï¸ı": 15622, "thood": 15623, "reef": 15624, "nope": 15625, "billy": 15626, "digi": 15627, "musi": 15628, "rival": 15629, "figured": 15630, "tality": 15631, "sunny": 15632, "berk": 15633, "awww": 15634, "awaits": 15635, "unreal": 15636, "copen": 15637, "asylum": 15638, "exotic": 
15639, "buen": 15640, "mock": 15641, "enable": 15642, "archy": 15643, "fra": 15644, "plastic": 15645, "almond": 15646, "ampli": 15647, "displays": 15648, "abbott": 15649, "sme": 15650, "xp": 15651, "ðŁĻĄ": 15652, "graphic": 15653, "ived": 15654, "mara": 15655, "caution": 15656, "leaks": 15657, "enberg": 15658, "ulu": 15659, "unicorn": 15660, "cannon": 15661, "apprentic": 15662, "ðŁĺĺðŁĺĺ": 15663, "bball": 15664, "willow": 15665, "atics": 15666, "amas": 15667, "manufacturer": 15668, "campaigns": 15669, "porters": 15670, "floors": 15671, "lsu": 15672, "type": 15673, "kej": 15674, "honorary": 15675, "itim": 15676, "tole": 15677, "minecraft": 15678, "dx": 15679, "mash": 15680, "rio": 15681, "consequences": 15682, "ronald": 15683, "gossi": 15684, "suffolk": 15685, "muse": 15686, "rbi": 15687, "livemusic": 15688, "ivan": 15689, "ðŁİ¤": 15690, "leu": 15691, "patriot": 15692, "manit": 15693, "lanca": 15694, "homedecor": 15695, "dear": 15696, "sigma": 15697, "tide": 15698, "strings": 15699, "vita": 15700, "sequel": 15701, "tryna": 15702, "investigate": 15703, "boris": 15704, "vegan": 15705, "barrier": 15706, "mindfulness": 15707, "webb": 15708, "hustle": 15709, "inda": 15710, "tanzania": 15711, "stray": 15712, "texas": 15713, "cag": 15714, "diagnosis": 15715, "woman": 15716, "gw": 15717, "obsession": 15718, "lative": 15719, "nufc": 15720, "flynn": 15721, "momentum": 15722, "sofa": 15723, "wald": 15724, "vegetable": 15725, "tucker": 15726, "supper": 15727, "seab": 15728, "arro": 15729, "seag": 15730, "venting": 15731, "councill": 15732, "splat": 15733, "calcul": 15734, "..#": 15735, "comfy": 15736, "odisha": 15737, "stopp": 15738, "warfare": 15739, "caes": 15740, "Ć ĀØ": 15741, "coy": 15742, "priceless": 15743, "insec": 15744, "ðŁĺĽ": 15745, "controls": 15746, "empowerment": 15747, "datascience": 15748, "perpe": 15749, "genic": 15750, "eres": 15751, "trudeau": 15752, "mano": 15753, "slavery": 15754, "expanding": 15755, "mahe": 15756, "failing": 15757, "saga": 15758, 
"photographs": 15759, "crest": 15760, "reon": 15761, "surfing": 15762, "hie": 15763, "ðŁįĢ": 15764, "jae": 15765, "fellows": 15766, "southampton": 15767, "solom": 15768, "cester": 15769, "tability": 15770, "horn": 15771, "sect": 15772, "hee": 15773, "coleman": 15774, "atlas": 15775, "explorer": 15776, "consultation": 15777, "copyright": 15778, "organizing": 15779, "denied": 15780, "monkeys": 15781, "noodles": 15782, "bris": 15783, "flor": 15784, "dough": 15785, "bonds": 15786, "shocked": 15787, "ecosystem": 15788, "carefully": 15789, "wm": 15790, "apartments": 15791, "curve": 15792, "sandiego": 15793, "mustard": 15794, "commen": 15795, "ceremon": 15796, "ech": 15797, "ruth": 15798, "ðŁĻĮðŁı»": 15799, "hawai": 15800, "filmed": 15801, "tear": 15802, "asingly": 15803, "cair": 15804, "watt": 15805, "instrument": 15806, "outta": 15807, "yeol": 15808, "riverside": 15809, "ë°": 15810, ".:": 15811, "norwich": 15812, "alog": 15813, "migrants": 15814, "newman": 15815, "ride": 15816, "sprink": 15817, "targeting": 15818, "believe": 15819, "torch": 15820, "reflects": 15821, "permission": 15822, "ffman": 15823, "enemies": 15824, "basics": 15825, "seized": 15826, "sundays": 15827, "lei": 15828, "hassan": 15829, "endo": 15830, "hc": 15831, "stad": 15832, "lements": 15833, "kkkk": 15834, "nano": 15835, "shark": 15836, "mana": 15837, "onic": 15838, "treatments": 15839, "early": 15840, "collaborative": 15841, "shuttle": 15842, "branches": 15843, "misses": 15844, "mainedcm": 15845, "apers": 15846, "kyle": 15847, "carrie": 15848, "leisure": 15849, "shet": 15850, "birding": 15851, "advances": 15852, "ðŁĵĿ": 15853, "popular": 15854, "diane": 15855, "abe": 15856, "rewar": 15857, "neighbour": 15858, "kpop": 15859, "remembrance": 15860, "playground": 15861, "rub": 15862, "krishna": 15863, "ebola": 15864, "inquiry": 15865, "epa": 15866, "lumin": 15867, "organisation": 15868, "abraham": 15869, "normally": 15870, "preten": 15871, "janet": 15872, "wt": 15873, "ðŁēİ": 15874, "encouraging": 
15875, "astic": 15876, "bump": 15877, "sydney": 15878, "sz": 15879, "ssss": 15880, "garrett": 15881, "ðŁĵ»": 15882, "consulting": 15883, "romania": 15884, "spotting": 15885, "chancellor": 15886, "arma": 15887, "prestigious": 15888, "ðĿIJ": 15889, "tad": 15890, "cryst": 15891, "competit": 15892, "ratio": 15893, "cataly": 15894, "brow": 15895, "jur": 15896, "viking": 15897, "commute": 15898, "yday": 15899, "layers": 15900, "dumb": 15901, "escal": 15902, "genocide": 15903, "fill": 15904, "gupta": 15905, "stepping": 15906, "sei": 15907, "foto": 15908, "wildcats": 15909, "coli": 15910, "project": 15911, "earnings": 15912, "str": 15913, "geons": 15914, "completion": 15915, "bm": 15916, "decorated": 15917, "crawford": 15918, "afghan": 15919, "scare": 15920, "visibility": 15921, "hib": 15922, "direction": 15923, "stroll": 15924, "christina": 15925, "alternate": 15926, "clare": 15927, "stylist": 15928, "behold": 15929, "sance": 15930, "leopard": 15931, "acquired": 15932, "narrative": 15933, "ashi": 15934, "thea": 15935, "????": 15936, "peas": 15937, "atch": 15938, "slides": 15939, "leen": 15940, "renewable": 15941, "english": 15942, "quir": 15943, "coaster": 15944, "rx": 15945, "fools": 15946, "matchday": 15947, "mism": 15948, "amazing": 15949, "zig": 15950, "keting": 15951, "wont": 15952, "towel": 15953, "diab": 15954, "stake": 15955, "nm": 15956, "melt": 15957, "ethan": 15958, "grape": 15959, "politician": 15960, "smen": 15961, "Ć­Äŗ": 15962, "reo": 15963, "weddings": 15964, "catcher": 15965, "oracle": 15966, "memo": 15967, "ðŁĮ“": 15968, "eck": 15969, "robbie": 15970, "norwegian": 15971, "operator": 15972, "amor": 15973, "sewing": 15974, "jul": 15975, "xie": 15976, "uv": 15977, "fifty": 15978, "mega": 15979, "tattoo": 15980, "liberals": 15981, "upri": 15982, "trafficking": 15983, "richardson": 15984, "suv": 15985, "kip": 15986, "messy": 15987, "tremendous": 15988, "glou": 15989, "courtney": 15990, "lad": 15991, "stereo": 15992, "myers": 15993, "idio": 15994, "^_^": 
15995, "manning": 15996, "dye": 15997, "wd": 15998, "throne": 15999, "junk": 16000, "asu": 16001, "provincial": 16002, "kook": 16003, "wrc": 16004, "fineart": 16005, "hampshire": 16006, "renaissance": 16007, "bred": 16008, "fallout": 16009, "sj": 16010, "snl": 16011, "alam": 16012, "torture": 16013, "fyi": 16014, "shines": 16015, "paw": 16016, "char": 16017, "henry": 16018, "crow": 16019, "acious": 16020, "dian": 16021, "paige": 16022, "bare": 16023, "stockholm": 16024, "scenery": 16025, "ðŁĩ·": 16026, "jeffrey": 16027, "push": 16028, "decoration": 16029, "ned": 16030, "cute": 16031, "brigade": 16032, "lavender": 16033, "invites": 16034, "esports": 16035, "voir": 16036, "dried": 16037, "transpl": 16038, "surgeon": 16039, "novels": 16040, "pulls": 16041, "sony": 16042, "lunar": 16043, "mane": 16044, "ivy": 16045, "frustr": 16046, "dorset": 16047, "sai": 16048, "torres": 16049, "ssion": 16050, "shutdown": 16051, "suggestions": 16052, "writing": 16053, "eo": 16054, "battlefield": 16055, "uga": 16056, "ðŁIJ¾": 16057, "vacu": 16058, "splac": 16059, "git": 16060, "ug": 16061, "highland": 16062, "%)": 16063, "mermaid": 16064, "sacramento": 16065, "tails": 16066, "pw": 16067, "kah": 16068, "tell": 16069, "enhanced": 16070, "Ƭķ": 16071, "auckland": 16072, "cruel": 16073, "ðŁ¤©": 16074, "audre": 16075, "sailor": 16076, "grammar": 16077, "glove": 16078, "deon": 16079, "inflam": 16080, "freshly": 16081, "kell": 16082, "zip": 16083, "christie": 16084, "mild": 16085, "dixon": 16086, "instructor": 16087, "gence": 16088, "ãħł": 16089, "subjec": 16090, "constitutional": 16091, "crowds": 16092, "invisible": 16093, "ruins": 16094, "dak": 16095, "sip": 16096, "plaque": 16097, "pouring": 16098, "complex": 16099, "zine": 16100, "stead": 16101, "flet": 16102, "transmission": 16103, "loway": 16104, "arun": 16105, "increasingly": 16106, "aud": 16107, "transparen": 16108, "crowned": 16109, "scoun": 16110, "blizzard": 16111, "luxu": 16112, "fiers": 16113, "achievements": 16114, "hunters": 
16115, "rocked": 16116, "basin": 16117, "violet": 16118, "proves": 16119, "achieving": 16120, "prosper": 16121, "sega": 16122, "float": 16123, "vian": 16124, "xiv": 16125, "polic": 16126, "tura": 16127, "approximately": 16128, "wanderlust": 16129, "keepers": 16130, "getaway": 16131, "cod": 16132, "polis": 16133, "bryan": 16134, "colts": 16135, "talents": 16136, "yogur": 16137, "glutenfree": 16138, "wrist": 16139, "gry": 16140, "czech": 16141, "ðŁİĪ": 16142, "eville": 16143, "ðŁıĪ": 16144, "tox": 16145, "daniels": 16146, "amer": 16147, "bids": 16148, "weareone": 16149, "metab": 16150, "gt": 16151, "boyz": 16152, "pdx": 16153, "possession": 16154, "pushed": 16155, "shrine": 16156, "realistic": 16157, "trigger": 16158, "navi": 16159, "rumors": 16160, "naf": 16161, "jenkins": 16162, "trun": 16163, "communi": 16164, "ÃĹ": 16165, "gamers": 16166, "armor": 16167, "mohammed": 16168, "balcony": 16169, "yah": 16170, "strongest": 16171, "rhythm": 16172, "unforgettable": 16173, "kp": 16174, "hobb": 16175, "custody": 16176, "gregor": 16177, "rita": 16178, "aesthetic": 16179, "ilation": 16180, "sponsoring": 16181, "nay": 16182, "kidnapp": 16183, "shs": 16184, "rajas": 16185, "meg": 16186, "significantly": 16187, "buttons": 16188, "lac": 16189, "versions": 16190, "essentials": 16191, "opinions": 16192, "kro": 16193, "dprinting": 16194, "widely": 16195, "dk": 16196, "uran": 16197, "yal": 16198, "requested": 16199, "cn": 16200, "curric": 16201, "plum": 16202, "grun": 16203, "vm": 16204, "devon": 16205, "myo": 16206, "relation": 16207, "juventus": 16208, "rouge": 16209, "minority": 16210, "mines": 16211, "jupiter": 16212, "nine": 16213, "oxygen": 16214, "frankie": 16215, "unesco": 16216, "fabric": 16217, "disgusting": 16218, "salman": 16219, "detection": 16220, "lanka": 16221, "dac": 16222, "ðŁĩ«ðŁĩ·": 16223, "argument": 16224, "shelves": 16225, "celtics": 16226, "roberto": 16227, "pigs": 16228, "hedge": 16229, "faul": 16230, "powering": 16231, "butterflies": 16232, "fir": 16233, 
"remake": 16234, "atti": 16235, "como": 16236, "empha": 16237, "kendall": 16238, "pokemon": 16239, "seating": 16240, "dans": 16241, "baldwin": 16242, "ðŁij»": 16243, "leslie": 16244, "onedirection": 16245, "timber": 16246, "iman": 16247, "font": 16248, "eder": 16249, "dion": 16250, "steph": 16251, "format": 16252, "gregory": 16253, "prop": 16254, "hex": 16255, "ruin": 16256, "sory": 16257, "infer": 16258, "naw": 16259, "barak": 16260, "sdgs": 16261, "karao": 16262, "lush": 16263, "vander": 16264, "endent": 16265, "gis": 16266, "afro": 16267, "soccer": 16268, "ayan": 16269, "tuni": 16270, "lung": 16271, "dayof": 16272, "alexa": 16273, "marath": 16274, "addicted": 16275, "agile": 16276, "hygi": 16277, "lightweight": 16278, "ì§": 16279, "mandela": 16280, "joey": 16281, "ancy": 16282, "hum": 16283, "bir": 16284, "memorial": 16285, "jimin": 16286, "ginger": 16287, "vak": 16288, "javascri": 16289, "crops": 16290, "origins": 16291, "dari": 16292, "piper": 16293, "import": 16294, "aggressive": 16295, "prediction": 16296, "repairs": 16297, "cracker": 16298, "voyage": 16299, "nike": 16300, "mummy": 16301, "linkedin": 16302, "countryside": 16303, "border": 16304, "glass": 16305, "pert": 16306, "sals": 16307, "shoe": 16308, "autographed": 16309, "walnut": 16310, "collegi": 16311, "salary": 16312, "pairing": 16313, "ðŁĮ¸": 16314, "cathol": 16315, "sweethe": 16316, "defeats": 16317, "strengthen": 16318, "rooftop": 16319, "improvements": 16320, "barriers": 16321, "uru": 16322, "tally": 16323, "ruled": 16324, "ðŁĨļ": 16325, "naija": 16326, "emoji": 16327, "percent": 16328, "gio": 16329, "probs": 16330, "once": 16331, "admits": 16332, "paths": 16333, "liar": 16334, "daytona": 16335, "peters": 16336, "cali": 16337, "calli": 16338, "mug": 16339, "osa": 16340, "aph": 16341, "aby": 16342, "hyde": 16343, "ethnic": 16344, "plains": 16345, "olf": 16346, "hahahahaha": 16347, "holic": 16348, "?!?!": 16349, "subli": 16350, "blacks": 16351, "mot": 16352, "ghton": 16353, "lovin": 16354, 
"brent": 16355, "baru": 16356, "lati": 16357, "dew": 16358, "ateau": 16359, "qa": 16360, "painful": 16361, "busters": 16362, "static": 16363, "ðŁĩ¨ðŁĩ¦": 16364, "notebook": 16365, "outfits": 16366, "sies": 16367, "rf": 16368, "floods": 16369, "ÑĢ": 16370, "throat": 16371, "suici": 16372, "rovers": 16373, "bengal": 16374, "prepares": 16375, "blog": 16376, "miniature": 16377, "ب": 16378, "amphi": 16379, "comb": 16380, "rsp": 16381, "intimate": 16382, "greene": 16383, "Ìĩ": 16384, "altar": 16385, "surgical": 16386, "vessel": 16387, "...?": 16388, "gavin": 16389, "gator": 16390, "threatened": 16391, "zar": 16392, "robbery": 16393, "dier": 16394, "promoted": 16395, "yg": 16396, "xs": 16397, "subs": 16398, "interviewing": 16399, "threatening": 16400, "dozen": 16401, "meado": 16402, "waterfall": 16403, "nintendoswitch": 16404, "calum": 16405, "ministers": 16406, "drop": 16407, "universities": 16408, "warned": 16409, "tactics": 16410, "ðŁĩ²": 16411, "refuse": 16412, "adju": 16413, "vast": 16414, "ðŁĺ“": 16415, "mcfc": 16416, "libya": 16417, "nofilter": 16418, "distributed": 16419, "reser": 16420, "ronnie": 16421, "deco": 16422, "javascript": 16423, "monk": 16424, "interests": 16425, "flex": 16426, "martha": 16427, "sties": 16428, "ood": 16429, "ðŁ¤£ðŁ¤£": 16430, "eun": 16431, "bali": 16432, "gomez": 16433, "stimul": 16434, "moderate": 16435, "dity": 16436, "iris": 16437, "straw": 16438, "consistent": 16439, "directions": 16440, "adopt": 16441, "salsa": 16442, "croo": 16443, "recovered": 16444, "blackfriday": 16445, "lancaster": 16446, "accept": 16447, "weareoneexo": 16448, "builds": 16449, "freeman": 16450, "airplane": 16451, "dition": 16452, "belong": 16453, "jamie": 16454, "pitching": 16455, "lif": 16456, "omin": 16457, "crispy": 16458, "prepping": 16459, "veg": 16460, "chang": 16461, "accomplished": 16462, "gracias": 16463, "dolphin": 16464, "elector": 16465, "culinary": 16466, "superbowl": 16467, "wala": 16468, "pursuit": 16469, "blackberry": 16470, "bean": 16471, 
"cardinal": 16472, "proved": 16473, "immigrant": 16474, "strictly": 16475, "holocaust": 16476, "passage": 16477, "haus": 16478, "coup": 16479, "purse": 16480, "harass": 16481, "<<": 16482, "leed": 16483, "adobe": 16484, "stad": 16485, "legislat": 16486, "parked": 16487, "priyan": 16488, "silva": 16489, "krist": 16490, "sthe": 16491, "funky": 16492, "iga": 16493, "settlement": 16494, "phs": 16495, "tmrw": 16496, "stressed": 16497, "hunt": 16498, "hockey": 16499, "treasures": 16500, "chambers": 16501, "olu": 16502, "hut": 16503, "marley": 16504, "texture": 16505, "wilderness": 16506, "mming": 16507, "potentially": 16508, "omaha": 16509, "judy": 16510, "toes": 16511, "spoiler": 16512, "distinguished": 16513, "felix": 16514, "ahu": 16515, "recommendations": 16516, "zombies": 16517, "hitler": 16518, "triple": 16519, "collapse": 16520, "motivated": 16521, "ultimat": 16522, "ggling": 16523, "soy": 16524, "cigar": 16525, "foren": 16526, "vineyard": 16527, "glitter": 16528, "findings": 16529, "colonial": 16530, "hunter": 16531, "erik": 16532, "dens": 16533, "beetle": 16534, "lotte": 16535, "subtle": 16536, "smatter": 16537, "trusted": 16538, "experimental": 16539, "naments": 16540, "ðŁĺĨ": 16541, "region": 16542, "acquisition": 16543, "breeding": 16544, "quarterback": 16545, "amreading": 16546, "ootd": 16547, "rude": 16548, "initiatives": 16549, "stout": 16550, "hyung": 16551, "outcome": 16552, "alfred": 16553, "mics": 16554, "expertise": 16555, "bacteria": 16556, "penguins": 16557, "jumper": 16558, "valencia": 16559, "bark": 16560, "ingday": 16561, "sellers": 16562, "contracts": 16563, "houston": 16564, "commissioned": 16565, "adaptation": 16566, "swansea": 16567, "santiago": 16568, "commonwealth": 16569, "judging": 16570, "submission": 16571, "scorer": 16572, "tommy": 16573, "ño": 16574, "exquis": 16575, "filing": 16576, "explanation": 16577, "allison": 16578, "wembley": 16579, "ridge": 16580, "chevy": 16581, "santos": 16582, "ownership": 16583, "cognitive": 16584, 
"favourites": 16585, "shed": 16586, "philanthro": 16587, "deleted": 16588, "godd": 16589, "snor": 16590, "guidelines": 16591, "ffing": 16592, "jeep": 16593, "clips": 16594, "swamp": 16595, "anor": 16596, "guild": 16597, "bolton": 16598, "springfield": 16599, "municipal": 16600, "goalkeeper": 16601, "yeon": 16602, "ðŁĺįðŁĺįðŁĺįðŁĺį": 16603, "ãħĭãħĭ": 16604, "waterfront": 16605, "grave": 16606, "contemporary": 16607, "arity": 16608, "ÃŃa": 16609, "sleeps": 16610, "syrup": 16611, "alam": 16612, "pire": 16613, "coyo": 16614, "motogp": 16615, "tyson": 16616, "kejri": 16617, "circul": 16618, "singly": 16619, "crunch": 16620, "complicated": 16621, "nostalgia": 16622, "kop": 16623, "move": 16624, "kale": 16625, "macro": 16626, "midwest": 16627, "hans": 16628, "tribal": 16629, "nude": 16630, "Ć ĀÆÄÆ": 16631, "beyonce": 16632, "congratulate": 16633, "cater": 16634, "league": 16635, "ðŁĻĬ": 16636, "ladder": 16637, "crashed": 16638, "technic": 16639, "karaoke": 16640, "harassment": 16641, "rots": 16642, "experiencing": 16643, "kristen": 16644, "ðŁĩ³": 16645, "ð٤Ĺ": 16646, "reflections": 16647, "guinness": 16648, "illustrator": 16649, "ðŁĻıðŁı»": 16650, "center": 16651, "narrow": 16652, "commons": 16653, "regulations": 16654, "ÙĨ": 16655, "harm": 16656, "croft": 16657, "cussion": 16658, "hongkong": 16659, "stical": 16660, "internship": 16661, "zoe": 16662, "chop": 16663, "hoods": 16664, "estimated": 16665, "batteries": 16666, "berkeley": 16667, "smoothie": 16668, "shaun": 16669, "cros": 16670, "~~": 16671, "campe": 16672, "hump": 16673, "bg": 16674, "prototype": 16675, "click": 16676, "shawn": 16677, "reviewed": 16678, "templ": 16679, "pf": 16680, "jedi": 16681, "blogs": 16682, "raymond": 16683, "asth": 16684, "bah": 16685, "avail": 16686, "scotch": 16687, "leafs": 16688, "nikki": 16689, "tok": 16690, "hollow": 16691, "urges": 16692, "oft": 16693, "unlike": 16694, "latin": 16695, "ue": 16696, "catering": 16697, "mili": 16698, "alternati": 16699, "maver": 16700, "и": 16701, 
"agle": 16702, "preorder": 16703, "lux": 16704, "cucu": 16705, "ðŁijıðŁijı": 16706, "tart": 16707, "âĿ¤âĿ¤âĿ¤": 16708, "arabic": 16709, "rapidly": 16710, "arrang": 16711, "allen": 16712, "traveltuesday": 16713, "paws": 16714, "flows": 16715, "stability": 16716, "fluid": 16717, "capp": 16718, "canberra": 16719, "uuuu": 16720, "spani": 16721, "demonstration": 16722, "mla": 16723, "placement": 16724, "mw": 16725, "presidents": 16726, "awesom": 16727, "beverly": 16728, "anist": 16729, "neal": 16730, "fathersday": 16731, "referendum": 16732, "lahore": 16733, "oaks": 16734, "debbie": 16735, "halfway": 16736, "ghosts": 16737, "debor": 16738, "matthews": 16739, "fiat": 16740, "tfw": 16741, "presen": 16742, "robi": 16743, "ded": 16744, "brock": 16745, "laughed": 16746, "amounts": 16747, "bamboo": 16748, "kindergarten": 16749, "eaten": 16750, "mtvhottest": 16751, "breakout": 16752, "usic": 16753, "fraser": 16754, "legislative": 16755, "pang": 16756, "module": 16757, "sammy": 16758, "gover": 16759, "earns": 16760, "expedition": 16761, "garh": 16762, "concepts": 16763, "charlie": 16764, "lava": 16765, "bachelor": 16766, "veggies": 16767, "determine": 16768, "ellie": 16769, "unlocked": 16770, "fruit": 16771, "dalla": 16772, "coupe": 16773, "washington": 16774, "deposit": 16775, "ivory": 16776, "paula": 16777, "chicag": 16778, "gucci": 16779, "ðŁİĄ": 16780, "cultiv": 16781, "pierce": 16782, "lifted": 16783, "stumb": 16784, "recover": 16785, "muscles": 16786, "conducting": 16787, "cbs": 16788, "mclaren": 16789, "sophia": 16790, "cellu": 16791, "oceans": 16792, "uploaded": 16793, "gameplay": 16794, "maldives": 16795, "kimber": 16796, "avoi": 16797, "racer": 16798, "caine": 16799, "cavs": 16800, "hana": 16801, "liga": 16802, "raven": 16803, "intervention": 16804, "inauguration": 16805, "ooh": 16806, "attraction": 16807, "merchandise": 16808, "tunein": 16809, "liking": 16810, "juniors": 16811, "intended": 16812, "attacking": 16813, "aquarium": 16814, "iwd": 16815, "components": 
16816, "suring": 16817, "centu": 16818, "yogurt": 16819, "ðŁıĄ": 16820, "showroom": 16821, "optical": 16822, "tyour": 16823, "judge": 16824, "yield": 16825, "anto": 16826, "plc": 16827, "transparency": 16828, "recycled": 16829, "chief": 16830, "arom": 16831, "ambassadors": 16832, "planet": 16833, "âĿĦï¸ı": 16834, "omed": 16835, "vanessa": 16836, "court": 16837, "margar": 16838, "haley": 16839, "vr": 16840, "regina": 16841, "pdates": 16842, "hispan": 16843, "livestream": 16844, "âģ£": 16845, "yahoo": 16846, "galla": 16847, "secured": 16848, "wir": 16849, "beneath": 16850, "offl": 16851, "nil": 16852, "amb": 16853, "yeg": 16854, "outlet": 16855, "ute": 16856, "peep": 16857, "lindsay": 16858, "bentley": 16859, "...!": 16860, "heel": 16861, "trilogy": 16862, "vos": 16863, "tyre": 16864, "therefore": 16865, "toronto": 16866, "abi": 16867, "simpli": 16868, "jae": 16869, "extensive": 16870, "elephants": 16871, "sor": 16872, "orientation": 16873, "impeach": 16874, "replay": 16875, "constructed": 16876, "peterson": 16877, "pais": 16878, "ported": 16879, "customs": 16880, "collap": 16881, "adu": 16882, "highlands": 16883, "salem": 16884, "shelby": 16885, "kovic": 16886, "strain": 16887, "rosie": 16888, "senators": 16889, "snaps": 16890, "bobb": 16891, "suzuki": 16892, "blades": 16893, "kp": 16894, "lolo": 16895, "generate": 16896, "sight": 16897, "mae": 16898, "structural": 16899, "predict": 16900, "jumped": 16901, "ahmad": 16902, "sung": 16903, "justice": 16904, "glam": 16905, "volvo": 16906, "jubilee": 16907, "detention": 16908, "losses": 16909, "puri": 16910, "everytime": 16911, "а": 16912, "rao": 16913, "edge": 16914, "limer": 16915, "resemb": 16916, "harold": 16917, "retri": 16918, "sacrific": 16919, "surprises": 16920, "amc": 16921, "srilanka": 16922, "barbie": 16923, "mens": 16924, "finn": 16925, "ags": 16926, "ukrainian": 16927, "embrac": 16928, "îIJ": 16929, "flavors": 16930, "homer": 16931, "laure": 16932, "outh": 16933, "priced": 16934, "verde": 16935, "firm": 
16936, "ahs": 16937, "cub": 16938, "trey": 16939, "paranor": 16940, "profit": 16941, "indv": 16942, "whoa": 16943, "harsh": 16944, "alot": 16945, "critics": 16946, "hubby": 16947, "figur": 16948, "gira": 16949, "castro": 16950, "chanel": 16951, "input": 16952, "originals": 16953, "tenant": 16954, "yyyy": 16955, "turers": 16956, "lincoln": 16957, "coon": 16958, "learn": 16959, "chou": 16960, "acare": 16961, "oles": 16962, "diner": 16963, "hyp": 16964, "bizarre": 16965, "mcr": 16966, "letsgo": 16967, "decorating": 16968, "ðŁĮİ": 16969, "alison": 16970, "arvin": 16971, "fd": 16972, "rehab": 16973, "mccarthy": 16974, "lottery": 16975, "dah": 16976, "minneapolis": 16977, "eligible": 16978, "diagnosed": 16979, "emerald": 16980, "destinations": 16981, "sans": 16982, "ory": 16983, "blazers": 16984, "nv": 16985, "bail": 16986, "digitalart": 16987, "noc": 16988, "malta": 16989, "solar": 16990, "pipes": 16991, "allegations": 16992, "nock": 16993, "pope": 16994, "brid": 16995, "premier": 16996, "nx": 16997, "presentations": 16998, "efa": 16999, "bows": 17000, "valve": 17001, "opponent": 17002, "Įƫ": 17003, "visual": 17004, "ingle": 17005, "categor": 17006, "eter": 17007, "pois": 17008, "dani": 17009, "attract": 17010, "neutral": 17011, "thene": 17012, "crashes": 17013, "freddie": 17014, "utili": 17015, "cst": 17016, "awakening": 17017, "sloven": 17018, "qualify": 17019, "proof": 17020, "fairy": 17021, "lev": 17022, "freight": 17023, "enjoys": 17024, "cupcake": 17025, "flavour": 17026, "âķ": 17027, "protective": 17028, "ðŁijıðŁı»": 17029, "isu": 17030, "admir": 17031, "hmmm": 17032, "continuous": 17033, "aires": 17034, "raptors": 17035, "showcasing": 17036, "yuk": 17037, "paste": 17038, "follower": 17039, "instructions": 17040, "spru": 17041, "@__": 17042, "theo": 17043, "debuts": 17044, "vette": 17045, "stow": 17046, "esof": 17047, "ached": 17048, "sultan": 17049, "sandwich": 17050, "somalia": 17051, "franco": 17052, "carne": 17053, "fluffy": 17054, "alpine": 17055, "jasmine": 
17056, "heated": 17057, "violin": 17058, "pless": 17059, "divorce": 17060, "performer": 17061, "phies": 17062, "portsm": 17063, "dara": 17064, "kirby": 17065, "lop": 17066, "chilli": 17067, "forth": 17068, "skype": 17069, "ðŁĩ®ðŁĩ¹": 17070, "celebrities": 17071, "edy": 17072, "vee": 17073, "poison": 17074, "eyel": 17075, "grabs": 17076, "ssic": 17077, "uno": 17078, "western": 17079, "railroad": 17080, "amer": 17081, "numerous": 17082, "sv": 17083, "fow": 17084, "fist": 17085, "âĢĭ": 17086, "requests": 17087, "martial": 17088, "emmy": 17089, "acceptance": 17090, "laura": 17091, "Ć ĀøĀ“": 17092, "erup": 17093, "hyundai": 17094, "outlander": 17095, "utt": 17096, "wrestle": 17097, "espresso": 17098, "demanding": 17099, "gdp": 17100, "geography": 17101, "saskat": 17102, "troll": 17103, "confeder": 17104, "sues": 17105, "sem": 17106, "bets": 17107, "tful": 17108, "tosh": 17109, "teaches": 17110, "coloured": 17111, "galway": 17112, "macy": 17113, "disorders": 17114, "bbcra": 17115, "atem": 17116, "fender": 17117, "litter": 17118, "esh": 17119, "providers": 17120, "renovation": 17121, "nominate": 17122, "psg": 17123, "nominations": 17124, "jenna": 17125, "sharp": 17126, "someday": 17127, "zur": 17128, "brains": 17129, "cheshire": 17130, "prey": 17131, "hugo": 17132, "¿": 17133, "token": 17134, "rv": 17135, "carr": 17136, "tactical": 17137, "zelda": 17138, "kayla": 17139, "fernando": 17140, "photographers": 17141, "jour": 17142, "umbrella": 17143, "woody": 17144, "congressman": 17145, "dump": 17146, "levy": 17147, "juan": 17148, "dazz": 17149, "signals": 17150, "lain": 17151, "anu": 17152, "michel": 17153, "porch": 17154, "alden": 17155, "siblings": 17156, "yale": 17157, "peel": 17158, "swick": 17159, "ggin": 17160, "llc": 17161, "kale": 17162, "scon": 17163, "ild": 17164, "patreon": 17165, "reel": 17166, "quin": 17167, "witt": 17168, "marty": 17169, "moody": 17170, "toni": 17171, "dery": 17172, "gators": 17173, "specifically": 17174, "ddin": 17175, "lyon": 17176, "trick": 
17177, "meadows": 17178, "pj": 17179, "borgh": 17180, "vik": 17181, "tur": 17182, "bronx": 17183, "puff": 17184, "lantern": 17185, "ðŁ¤¦": 17186, "gently": 17187, "bestie": 17188, "fact": 17189, "refused": 17190, "fasci": 17191, "mpy": 17192, "ðŁĶµ": 17193, "crossover": 17194, "meadow": 17195, "indianapolis": 17196, "ducation": 17197, "sley": 17198, "loom": 17199, "mixer": 17200, "newmusic": 17201, "filmmaker": 17202, "prosperity": 17203, "lim": 17204, "weekend": 17205, "creamy": 17206, "neutr": 17207, "luther": 17208, "hv": 17209, "northern": 17210, "two": 17211, "hra": 17212, "catches": 17213, "appearances": 17214, "habit": 17215, "kittens": 17216, "nv": 17217, "illac": 17218, "infan": 17219, "regardless": 17220, "lizard": 17221, "dunk": 17222, "curtain": 17223, "acom": 17224, "intu": 17225, "vez": 17226, "emin": 17227, "flats": 17228, "calendars": 17229, "empower": 17230, "ruined": 17231, "hungary": 17232, "vid": 17233, "wex": 17234, "ulum": 17235, "aberdeen": 17236, "osa": 17237, "kt": 17238, "massi": 17239, "seemed": 17240, "sden": 17241, "'?": 17242, "telephone": 17243, "defi": 17244, "inspires": 17245, "meow": 17246, "zones": 17247, "blind": 17248, "ply": 17249, "tucson": 17250, "adventure": 17251, "ged": 17252, "oyster": 17253, "ðŁijıðŁijıðŁijı": 17254, "output": 17255, "ttt": 17256, "metallic": 17257, "smash": 17258, "ucla": 17259, "scots": 17260, "perfect": 17261, "lucy": 17262, "regularly": 17263, "spic": 17264, "relative": 17265, "athers": 17266, "mise": 17267, "battling": 17268, "decides": 17269, "mata": 17270, "occupied": 17271, "randomly": 17272, "catsoftwitter": 17273, "gian": 17274, "bally": 17275, "alties": 17276, "allies": 17277, "immen": 17278, "syrac": 17279, "ðŁēľðŁēľ": 17280, "llan": 17281, "aur": 17282, "kut": 17283, "lamar": 17284, "affects": 17285, "nra": 17286, "starwar": 17287, "ð٤ĺ": 17288, "scram": 17289, "enchan": 17290, "process": 17291, "luxurious": 17292, "array": 17293, "sherlock": 17294, "compati": 17295, "dorf": 17296, "stress": 
17297, "msu": 17298, "swith": 17299, "sala": 17300, "sofinstagram": 17301, "foil": 17302, "understood": 17303, "quay": 17304, "rp": 17305, "cade": 17306, "jaw": 17307, "enab": 17308, "encoun": 17309, "ðŁİī:": 17310, "dock": 17311, "saturn": 17312, "mull": 17313, "layout": 17314, "rarely": 17315, "happily": 17316, "fixture": 17317, "orph": 17318, "overlooking": 17319, "herbs": 17320, "mitt": 17321, "pillar": 17322, "nolan": 17323, "petty": 17324, "stry": 17325, "ui": 17326, "muk": 17327, "ores": 17328, "overs": 17329, "Ôµ": 17330, "recreation": 17331, "wesley": 17332, "rit": 17333, "kejriwal": 17334, "stocking": 17335, "gv": 17336, "subscribers": 17337, "moose": 17338, "mae": 17339, "bert": 17340, "oppre": 17341, "assignment": 17342, "uro": 17343, "highlighting": 17344, "calvin": 17345, "weigh": 17346, "cambodia": 17347, "avon": 17348, "kem": 17349, "disabilities": 17350, "ready": 17351, "chargers": 17352, "pads": 17353, "izing": 17354, "illian": 17355, "truste": 17356, "colleges": 17357, "associates": 17358, "albany": 17359, "milton": 17360, "cron": 17361, "bur": 17362, "hardly": 17363, "sights": 17364, "antiques": 17365, "echo": 17366, "surprisingly": 17367, "haiti": 17368, "capt": 17369, "php": 17370, "opio": 17371, "inequality": 17372, "equal": 17373, "keny": 17374, "schmid": 17375, "autographs": 17376, "rent": 17377, "quer": 17378, "citrus": 17379, "challenged": 17380, "tec": 17381, "epide": 17382, "fest": 17383, "zhou": 17384, "lime": 17385, "citizenship": 17386, "crystal": 17387, "convinced": 17388, "messenger": 17389, "copenhagen": 17390, "âĿĹï¸ı": 17391, "warran": 17392, "developments": 17393, "ï¸ıâĄ£": 17394, "forex": 17395, "hiro": 17396, "sneakers": 17397, "xide": 17398, "viva": 17399, "stereo": 17400, "batting": 17401, "ssel": 17402, "host": 17403, "bengal": 17404, "criticism": 17405, "qc": 17406, "crun": 17407, "attempted": 17408, "rye": 17409, "determination": 17410, "creations": 17411, "dread": 17412, "labels": 17413, "posse": 17414, "ancer": 17415, 
"johan": 17416, "sister": 17417, "partnerships": 17418, "lesbian": 17419, "kst": 17420, "guarantee": 17421, "baro": 17422, "fixing": 17423, "mason": 17424, "mous": 17425, "chemicals": 17426, "tless": 17427, "biodiversity": 17428, "paro": 17429, "bharat": 17430, "acol": 17431, "refuge": 17432, "ente": 17433, "titi": 17434, "dyssey": 17435, "responds": 17436, "lefto": 17437, "iner": 17438, "sevel": 17439, "rahul": 17440, "oline": 17441, "frankfur": 17442, "choreo": 17443, "enjoyable": 17444, "cto": 17445, "struggles": 17446, "woodland": 17447, "heavyweight": 17448, "gens": 17449, "recep": 17450, "accred": 17451, "ðŁĺ”": 17452, "transformed": 17453, "listen": 17454, "atop": 17455, "nk": 17456, "surge": 17457, "bere": 17458, "governor": 17459, "prisoners": 17460, "claude": 17461, "till": 17462, "mulator": 17463, "emotion": 17464, "waterloo": 17465, "start": 17466, "ðŁĩº": 17467, "cleaned": 17468, "grandmother": 17469, "fearless": 17470, "african": 17471, "astronomy": 17472, "ðŁıģ": 17473, "Ć ĀøÄ»": 17474, "theworld": 17475, "suitable": 17476, "anthony": 17477, "kand": 17478, "tten": 17479, "meaningful": 17480, "disclo": 17481, "jacobs": 17482, "ø": 17483, "tomlinson": 17484, "ghetti": 17485, "typho": 17486, "substan": 17487, "asco": 17488, "tek": 17489, "nagar": 17490, "mud": 17491, "amon": 17492, "vaccine": 17493, "fty": 17494, "flesh": 17495, "noel": 17496, "inflation": 17497, "portugue": 17498, "glamour": 17499, "tram": 17500, "vre": 17501, "tequ": 17502, "roundup": 17503, "wyn": 17504, "rejected": 17505, "mosaic": 17506, "sighting": 17507, "calf": 17508, "ota": 17509, "composition": 17510, "gopro": 17511, "gonzale": 17512, "eed": 17513, "bard": 17514, "tue": 17515, "effectively": 17516, "ween": 17517, "alto": 17518, "ribs": 17519, "relate": 17520, "thirsty": 17521, "furious": 17522, "dim": 17523, "chard": 17524, "perfume": 17525, "sny": 17526, "churchill": 17527, "kof": 17528, "masterclass": 17529, "wave": 17530, "ðŁĶµ": 17531, "erin": 17532, "owns": 17533, "tobe": 
17534, "skilled": 17535, "tem": 17536, "gof": 17537, "eni": 17538, "tori": 17539, "crazy": 17540, "lick": 17541, "resistant": 17542, "icial": 17543, "agar": 17544, "!:": 17545, "gali": 17546, "delaware": 17547, "blitz": 17548, "kohli": 17549, "puck": 17550, "availability": 17551, "himalay": 17552, "influential": 17553, "crochet": 17554, "victori": 17555, "reading": 17556, "hobby": 17557, "viet": 17558, "jas": 17559, "engra": 17560, "skul": 17561, "ðŁĩ²ðŁĩ": 17562, "educate": 17563, "techno": 17564, "districts": 17565, "blues": 17566, "sett": 17567, "seventh": 17568, "learns": 17569, "eeee": 17570, "apocalypse": 17571, "hangout": 17572, "cruel": 17573, "mutu": 17574, "bruh": 17575, "helen": 17576, "sheer": 17577, "ction": 17578, "klein": 17579, "texans": 17580, "cereal": 17581, "shine": 17582, "nered": 17583, "gras": 17584, "ambro": 17585, "fella": 17586, "hindu": 17587, "matthew": 17588, "lima": 17589, "miranda": 17590, "jewel": 17591, "soho": 17592, "eurovision": 17593, "neighbours": 17594, "chandler": 17595, "besides": 17596, "ðŁ„°": 17597, "astros": 17598, "thumbs": 17599, "renault": 17600, "rave": 17601, "hired": 17602, "ðŁĸ¤": 17603, "itary": 17604, "zor": 17605, "blazer": 17606, "kine": 17607, "eau": 17608, "katy": 17609, "dccomics": 17610, "pec": 17611, "rodgers": 17612, "waterproof": 17613, "killers": 17614, "superint": 17615, "preserv": 17616, "asso": 17617, "brewers": 17618, "promotional": 17619, "scam": 17620, "villages": 17621, "sketches": 17622, "juicy": 17623, "forlife": 17624, "audit": 17625, "solo": 17626, "fundamental": 17627, "lene": 17628, "philippine": 17629, "tend": 17630, "conservatives": 17631, "sponsorship": 17632, "ddle": 17633, "aine": 17634, "htc": 17635, "osi": 17636, "hulk": 17637, "waf": 17638, "Ć ĀøÄ»": 17639, "evaluation": 17640, "antine": 17641, "slee": 17642, "robertson": 17643, "roosevel": 17644, "agi": 17645, "sophistic": 17646, "employers": 17647, "bubbles": 17648, "kowski": 17649, "interaction": 17650, "shu": 17651, "boule": 
17652, "ican": 17653, "jare": 17654, "hank": 17655, "legitim": 17656, "knicks": 17657, "karma": 17658, "receiver": 17659, "perks": 17660, "uh": 17661, "stair": 17662, "suni": 17663, "laboratory": 17664, "graves": 17665, "vocals": 17666, "oot": 17667, "cture": 17668, "thrive": 17669, "tico": 17670, "ãĄ³": 17671, "bw": 17672, "cartoons": 17673, "mcdonalds": 17674, "draw": 17675, "yung": 17676, "pler": 17677, "lid": 17678, "ethical": 17679, "groove": 17680, "enta": 17681, "internationalwomensday": 17682, "patron": 17683, "worries": 17684, "ðŁİħ": 17685, "ðŁijĭ": 17686, "katherine": 17687, "diaz": 17688, "tori": 17689, "bachchan": 17690, "trust": 17691, "mineral": 17692, "icom": 17693, "builders": 17694, "born": 17695, "coloring": 17696, "latte": 17697, "case": 17698, "revolution": 17699, "trader": 17700, "oxid": 17701, "chipot": 17702, "instantly": 17703, "southern": 17704, "sehun": 17705, "prob": 17706, "hernandez": 17707, "lisbon": 17708, "huawe": 17709, "pong": 17710, "mea": 17711, "rooney": 17712, "wheelchair": 17713, "keen": 17714, "bett": 17715, "corin": 17716, "regulatory": 17717, "displac": 17718, "karen": 17719, "schem": 17720, "sunsets": 17721, "whales": 17722, "reminis": 17723, "hep": 17724, "hide": 17725, "marcel": 17726, "pandora": 17727, "doyle": 17728, "thfc": 17729, "otto": 17730, "nokia": 17731, "transgender": 17732, "kov": 17733, "hawaiian": 17734, "shave": 17735, "sovere": 17736, "excer": 17737, "nicki": 17738, "pug": 17739, "stor": 17740, "roth": 17741, "weet": 17742, "legal": 17743, "dignity": 17744, "pow": 17745, "homage": 17746, "ðŁĩ³ðŁĩ": 17747, "sre": 17748, "canon": 17749, "lax": 17750, "woah": 17751, "quartz": 17752, "ña": 17753, "greeting": 17754, "flickr": 17755, "nairobi": 17756, "advocates": 17757, "anc": 17758, "vii": 17759, "eugene": 17760, "thra": 17761, "cre": 17762, "elan": 17763, "pension": 17764, "thletics": 17765, "toni": 17766, "reagan": 17767, "xv": 17768, "store": 17769, "bench": 17770, "harlem": 17771, "toddler": 17772, 
"sentenced": 17773, "âĻ„ï¸ı": 17774, "globally": 17775, "cheaper": 17776, "uf": 17777, "mam": 17778, "nico": 17779, "iku": 17780, "thou": 17781, "nist": 17782, "dami": 17783, "thala": 17784, "rhodes": 17785, "sale": 17786, "bowls": 17787, "âĪ": 17788, "lasvegas": 17789, "sanctions": 17790, "admire": 17791, "matched": 17792, "unable": 17793, "traveler": 17794, "eleven": 17795, "strawberries": 17796, "âĢĶâĢĶâĢĶâĢĶ": 17797, "studio": 17798, "jacques": 17799, "ims": 17800, "valued": 17801, "sno": 17802, "cheesecake": 17803, "nxt": 17804, "eos": 17805, "sx": 17806, "fx": 17807, "tonic": 17808, "hatch": 17809, "chicks": 17810, "grads": 17811, "handic": 17812, "rory": 17813, "asp": 17814, "ripped": 17815, "dentist": 17816, "nen": 17817, "lufc": 17818, "âľĬ": 17819, "dige": 17820, "hopkins": 17821, "sherman": 17822, "fda": 17823, "forall": 17824, "ashley": 17825, "strand": 17826, "hy": 17827, "liquor": 17828, "buffet": 17829, "essence": 17830, "pharma": 17831, "suriya": 17832, "ðŁēĻðŁēĻ": 17833, "festivals": 17834, "zan": 17835, "refresh": 17836, "purple": 17837, "uniforms": 17838, "kenneth": 17839, "=)": 17840, "asan": 17841, "helsin": 17842, "transformers": 17843, "kali": 17844, "personalized": 17845, "chalk": 17846, "bobby": 17847, "âĮ": 17848, "themes": 17849, "departure": 17850, "print": 17851, "illustrations": 17852, "quiet": 17853, "agrees": 17854, "griff": 17855, "س": 17856, "miti": 17857, "together": 17858, "convenience": 17859, "abar": 17860, "carlo": 17861, "turtles": 17862, "infosec": 17863, "somewhat": 17864, "arlington": 17865, "scholarships": 17866, "emirates": 17867, "mums": 17868, "stella": 17869, "autonom": 17870, "feather": 17871, "gore": 17872, "nominees": 17873, "fragrance": 17874, "ƑĤ": 17875, "wong": 17876, "theastern": 17877, "gre": 17878, "zilla": 17879, "isi": 17880, "bumper": 17881, "goo": 17882, "dozens": 17883, "abduc": 17884, "âļªï¸ı": 17885, "oils": 17886, "donors": 17887, "silicon": 17888, "ipod": 17889, "fortnite": 17890, "ðŁē¨": 17891, 
"toro": 17892, "sparkling": 17893, "consciousness": 17894, "pala": 17895, "num": 17896, "mounted": 17897, "ffins": 17898, "thieves": 17899, "teammate": 17900, "prab": 17901, "omer": 17902, "tapes": 17903, "bod": 17904, "mitsu": 17905, "stew": 17906, "ere": 17907, "pbs": 17908, "tusc": 17909, "lowe": 17910, "rade": 17911, "parliamentary": 17912, "hm": 17913, "edgar": 17914, "ðŁijĩðŁijĩ": 17915, "toa": 17916, "agh": 17917, "honi": 17918, "slate": 17919, "geek": 17920, "apt": 17921, "hardt": 17922, "tap": 17923, "horizon": 17924, "growth": 17925, "makeover": 17926, "hil": 17927, "paperback": 17928, "idan": 17929, "rehabil": 17930, "giu": 17931, "possibilities": 17932, "lettu": 17933, "franco": 17934, "boss": 17935, "acher": 17936, "doesnt": 17937, "moe": 17938, "taker": 17939, "hussain": 17940, "mlk": 17941, "dil": 17942, "thia": 17943, "hama": 17944, "realised": 17945, "ravens": 17946, "curriculum": 17947, "mith": 17948, "knight": 17949, "tedx": 17950, "rv": 17951, "isaiah": 17952, "cumbria": 17953, "birthdays": 17954, "fing": 17955, "prez": 17956, "mubarak": 17957, "exquisite": 17958, "clearance": 17959, "yen": 17960, "pari": 17961, "evo": 17962, "ú": 17963, "modified": 17964, "applying": 17965, "implement": 17966, "discovering": 17967, "chapman": 17968, "indiegame": 17969, "disk": 17970, "crowdfunding": 17971, "machin": 17972, "livel": 17973, "styled": 17974, "âĿĮ": 17975, "making": 17976, "rehearsals": 17977, "nutriti": 17978, "subscription": 17979, "andro": 17980, "creators": 17981, "carries": 17982, "kylie": 17983, "camden": 17984, "apprentice": 17985, "taxpay": 17986, "cca": 17987, "tuesdaythoughts": 17988, "pissed": 17989, "erman": 17990, "detec": 17991, "freedom": 17992, "meri": 17993, "..!": 17994, "psalm": 17995, "sunlight": 17996, "perspec": 17997, "beings": 17998, "bookstore": 17999, "rockstar": 18000, "functions": 18001, "pence": 18002, "faves": 18003, "zn": 18004, "obamacare": 18005, "spill": 18006, "coventry": 18007, "pigeon": 18008, "pivo": 18009, 
"bait": 18010, "kolkata": 18011, "aval": 18012, "donor": 18013, "wah": 18014, "privileg": 18015, "traditions": 18016, "rajasthan": 18017, "teness": 18018, "portuguese": 18019, "ynes": 18020, "tackles": 18021, "defic": 18022, "torn": 18023, "polling": 18024, "thorne": 18025, "ina": 18026, "benedict": 18027, "barry": 18028, "calories": 18029, "verdict": 18030, "savethe": 18031, "norton": 18032, "office": 18033, "mainstream": 18034, "improves": 18035, "fron": 18036, "responding": 18037, "realtor": 18038, "scottish": 18039, "declar": 18040, "rl": 18041, "shiv": 18042, "supplier": 18043, "resting": 18044, "sweets": 18045, "qui": 18046, ".â̦": 18047, "whitney": 18048, "startup": 18049, "thankyou": 18050, "teacher": 18051, "halls": 18052, "have": 18053, "handmade": 18054, "proving": 18055, "quartet": 18056, "rochester": 18057, "lian": 18058, "virtual": 18059, "mendes": 18060, "oficial": 18061, "midlands": 18062, "xbox": 18063, "measuring": 18064, "ovo": 18065, "accommodation": 18066, "brides": 18067, "collegiate": 18068, "intellectual": 18069, "incar": 18070, "niag": 18071, "ðŁį·": 18072, "sfw": 18073, "cocoa": 18074, "coats": 18075, "civilians": 18076, "presidency": 18077, "matrix": 18078, "sweetheart": 18079, "triathlon": 18080, "wagner": 18081, "radic": 18082, "planner": 18083, "theo": 18084, "execution": 18085, "kum": 18086, "thewalkingdead": 18087, "scar": 18088, "rotation": 18089, "blogging": 18090, "bomb": 18091, "reson": 18092, "bbles": 18093, "stare": 18094, "assisted": 18095, "edo": 18096, "branded": 18097, "warnings": 18098, "thorpe": 18099, "acknowle": 18100, "satisfied": 18101, "shores": 18102, "rid": 18103, "dora": 18104, "physically": 18105, "bigh": 18106, "approves": 18107, "hah": 18108, "rical": 18109, "versatile": 18110, "pretend": 18111, "lum": 18112, "abhi": 18113, "yee": 18114, "spit": 18115, "ãĢĮ": 18116, "djs": 18117, "ashtra": 18118, "jt": 18119, "venues": 18120, "grammys": 18121, "cyclo": 18122, "tracker": 18123, "overwatch": 18124, "replica": 
18125, "elyn": 18126, "nrl": 18127, "lindsey": 18128, "homo": 18129, "balloons": 18130, "kitchen": 18131, "sis": 18132, "amos": 18133, "endeav": 18134, "ðŁē»": 18135, "arec": 18136, "thug": 18137, "hooked": 18138, "hrc": 18139, "newyork": 18140, "burgh": 18141, "americas": 18142, "patricia": 18143, "ugu": 18144, "apathy": 18145, "hast": 18146, "psychi": 18147, "cork": 18148, "petrol": 18149, "ðŁİ¬": 18150, "aku": 18151, "popping": 18152, "psychological": 18153, "aux": 18154, "gma": 18155, "cadillac": 18156, "waste": 18157, "authent": 18158, "bristol": 18159, "name": 18160, "queer": 18161, "tober": 18162, "jerry": 18163, "comin": 18164, "chant": 18165, "privileged": 18166, "opar": 18167, "loser": 18168, "text": 18169, "marker": 18170, "stries": 18171, "equally": 18172, "aki": 18173, "christmas": 18174, "gareth": 18175, "blew": 18176, "emma": 18177, "imagin": 18178, "seals": 18179, "cheat": 18180, "conditioning": 18181, "jana": 18182, "rens": 18183, "daries": 18184, "oasis": 18185, "discounts": 18186, "council": 18187, "ika": 18188, "shirley": 18189, "voucher": 18190, "alps": 18191, "wx": 18192, "qr": 18193, "drift": 18194, "attempting": 18195, "utc": 18196, "ت": 18197, "gonzalez": 18198, "mf": 18199, "joker": 18200, "parallel": 18201, "pare": 18202, "aspects": 18203, "procedu": 18204, "np": 18205, "ama": 18206, "raleigh": 18207, "brighten": 18208, "guire": 18209, "radiation": 18210, "crescent": 18211, "hob": 18212, "ille": 18213, "strand": 18214, "vore": 18215, "nard": 18216, "chest": 18217, "diwali": 18218, "avatar": 18219, "alder": 18220, "dling": 18221, "pathetic": 18222, "ðŁēĺ": 18223, "spirit": 18224, "jorge": 18225, "filmmaking": 18226, "ðŁĻıðŁĻı": 18227, "challenger": 18228, "bj": 18229, "downtown": 18230, "html": 18231, "adequ": 18232, "twisted": 18233, "inely": 18234, "('": 18235, "wraps": 18236, "operational": 18237, "yne": 18238, "nus": 18239, "magnet": 18240, "marketplace": 18241, "healthier": 18242, "snapshot": 18243, "damon": 18244, "interven": 18245, 
"federer": 18246, "owls": 18247, "biscuits": 18248, "jp": 18249, "rodeo": 18250, "blueberry": 18251, "lection": 18252, "frontier": 18253, "summers": 18254, "reyes": 18255, "pedestrian": 18256, "gol": 18257, "caffe": 18258, "refurbi": 18259, "boulder": 18260, "meghan": 18261, "specialty": 18262, "lass": 18263, "ei": 18264, "suspects": 18265, "approx": 18266, "rrr": 18267, "rath": 18268, "stim": 18269, "crushed": 18270, "hed": 18271, "whun": 18272, "loaf": 18273, "crore": 18274, "rivera": 18275, "genetics": 18276, "sock": 18277, "wasted": 18278, "nypd": 18279, "answering": 18280, "dove": 18281, "bella": 18282, "olin": 18283, "dun": 18284, "fiji": 18285, "pretty": 18286, "sparkle": 18287, "yun": 18288, "jd": 18289, "europa": 18290, "lifts": 18291, "amber": 18292, "mur": 18293, "tek": 18294, "boyd": 18295, "royalty": 18296, "indo": 18297, "rib": 18298, "gotham": 18299, "tiest": 18300, "installing": 18301, "kemp": 18302, "thephoto": 18303, "cosmic": 18304, ")))": 18305, "wholesale": 18306, "loyment": 18307, "easy": 18308, "suing": 18309, "settled": 18310, "afp": 18311, "prover": 18312, "supportive": 18313, "rees": 18314, "neath": 18315, "deliber": 18316, "cé": 18317, "welcome": 18318, "picoftheday": 18319, "newborn": 18320, "patty": 18321, "suns": 18322, "siest": 18323, "flint": 18324, "differently": 18325, "spoilers": 18326, "trooper": 18327, "gins": 18328, "cory": 18329, "lookout": 18330, "equipped": 18331, "tape": 18332, "toby": 18333, "researcher": 18334, "ush": 18335, "keyes": 18336, "alma": 18337, "induction": 18338, "kw": 18339, "khar": 18340, "slick": 18341, "bride": 18342, "eur": 18343, "craving": 18344, "bookings": 18345, "ches": 18346, "trunk": 18347, "vernon": 18348, "spher": 18349, "crystals": 18350, "relatively": 18351, "pompe": 18352, "unions": 18353, "valley": 18354, "para": 18355, "want": 18356, "okc": 18357, "deaf": 18358, "sergio": 18359, "lennon": 18360, "shay": 18361, "cra": 18362, "vat": 18363, "hee": 18364, "twe": 18365, "liquid": 18366, "poly": 
18367, "ðŁİģ": 18368, "bent": 18369, "bearing": 18370, "motorsport": 18371, "barbe": 18372, "testi": 18373, "hani": 18374, "financing": 18375, "astronaut": 18376, "watercolour": 18377, "rish": 18378, "comiccon": 18379, "gart": 18380, "wrong": 18381, "bern": 18382, "itan": 18383, "stepped": 18384, "filters": 18385, "clow": 18386, "mex": 18387, "demons": 18388, "allo": 18389, "expanded": 18390, "command": 18391, "eters": 18392, "goats": 18393, "siri": 18394, "yr": 18395, "pottery": 18396, "marion": 18397, "ile": 18398, "elan": 18399, "santo": 18400, "persona": 18401, "duke": 18402, "homeless": 18403, "lighted": 18404, "wheeler": 18405, "changer": 18406, "cabbage": 18407, "surreal": 18408, "hamburg": 18409, "smashed": 18410, "stran": 18411, "knot": 18412, "iart": 18413, "obi": 18414, "bedro": 18415, "dial": 18416, "thick": 18417, "bingo": 18418, "fus": 18419, "vacuum": 18420, "conve": 18421, "ative": 18422, "accuracy": 18423, "account": 18424, "refer": 18425, "riz": 18426, "spiderman": 18427, "bana": 18428, "rite": 18429, "ub": 18430, "abs": 18431, "medical": 18432, "link": 18433, "siem": 18434, ">>>>": 18435, "betra": 18436, "glowing": 18437, "reactions": 18438, "puppet": 18439, "spaghetti": 18440, "angs": 18441, "remedi": 18442, "prayfor": 18443, "royce": 18444, "charlotte": 18445, "£ï¸ı": 18446, "ghet": 18447, "affecting": 18448, "rode": 18449, "socialist": 18450, "moses": 18451, "azi": 18452, "oit": 18453, "reporters": 18454, "cdt": 18455, "aping": 18456, "snat": 18457, "minimal": 18458, "waist": 18459, "siege": 18460, ">>>>": 18461, "rig": 18462, "schmidt": 18463, "hare": 18464, "eca": 18465, "thorn": 18466, "hemp": 18467, "esthe": 18468, "clyde": 18469, "tha": 18470, "donut": 18471, "mohamed": 18472, "lingerie": 18473, "legg": 18474, "carpenter": 18475, "performers": 18476, "dea": 18477, "imagined": 18478, "curse": 18479, "lash": 18480, "ctr": 18481, "agua": 18482, "roar": 18483, "gri": 18484, "role": 18485, "jfk": 18486, "resurrec": 18487, "roosevelt": 18488, 
"marilyn": 18489, "smalle": 18490, "willis": 18491, "waited": 18492, "charities": 18493, "theres": 18494, "lik": 18495, "original": 18496, "cari": 18497, "cough": 18498, "cruci": 18499, "lagun": 18500, "contrast": 18501, "kou": 18502, "armour": 18503, "removing": 18504, "tent": 18505, "mazda": 18506, "brighter": 18507, "thief": 18508, "corner": 18509, "tequila": 18510, "buzzing": 18511, "albi": 18512, "pam": 18513, "azure": 18514, "discoun": 18515, "pixelart": 18516, "possibility": 18517, "hamont": 18518, "trades": 18519, "buda": 18520, "hive": 18521, "versy": 18522, "finch": 18523, "transpa": 18524, "emi": 18525, "terrifying": 18526, "inqui": 18527, "gba": 18528, "substitu": 18529, "collecti": 18530, "placing": 18531, "cindy": 18532, "kann": 18533, "patho": 18534, "diamond": 18535, "mourinho": 18536, "guinea": 18537, "anthropo": 18538, "airs": 18539, "pumps": 18540, "ìļ": 18541, "paso": 18542, "curling": 18543, "anita": 18544, "residency": 18545, "newh": 18546, "joon": 18547, "cigarette": 18548, "queue": 18549, "extrac": 18550, "games": 18551, "splen": 18552, "express": 18553, "publicly": 18554, "bonnie": 18555, "tribune": 18556, "baek": 18557, "reasonable": 18558, "cor": 18559, "timothy": 18560, "sheeran": 18561, "ı": 18562, "fdn": 18563, "sutton": 18564, "concentration": 18565, "caravan": 18566, "xavier": 18567, "alger": 18568, "cylin": 18569, "frederick": 18570, "nerve": 18571, "peak": 18572, "lettuce": 18573, "jail": 18574, "pregame": 18575, "kavan": 18576, "upgraded": 18577, "ecology": 18578, "squadron": 18579, "grapes": 18580, "goog": 18581, "pastry": 18582, "ðŁĹ£": 18583, "ãĄ¼ãĄ": 18584, "milano": 18585, "awaz": 18586, "presenter": 18587, "ðŁĮ¿": 18588, "herd": 18589, "kings": 18590, "template": 18591, "flour": 18592, "hv": 18593, "kley": 18594, "iya": 18595, "spec": 18596, "ater": 18597, "frankfurt": 18598, "coch": 18599, "texting": 18600, "deli": 18601, "communist": 18602, "regiment": 18603, "eleanor": 18604, "anticipated": 18605, "ðŁijĮðŁı»": 18606, 
"thephotohour": 18607, "rano": 18608, "surviving": 18609, "simulation": 18610, "dawson": 18611, "arin": 18612, "aqua": 18613, "mor": 18614, "â̦.": 18615, "cino": 18616, "iraqi": 18617, "shaz": 18618, "dundee": 18619, "wes": 18620, "drau": 18621, "hannah": 18622, "snews": 18623, "occupation": 18624, "steen": 18625, "xm": 18626, "angles": 18627, "settings": 18628, "guru": 18629, "knox": 18630, "orca": 18631, "shaping": 18632, "went": 18633, "drilling": 18634, "zzie": 18635, "bri": 18636, "kissing": 18637, "find": 18638, "maine": 18639, "âŃIJï¸ıâŃIJï¸ı": 18640, "ðŁĮį": 18641, "larry": 18642, "busted": 18643, "tavern": 18644, "actively": 18645, "-\"": 18646, "replacing": 18647, "nod": 18648, "unlock": 18649, ".\"": 18650, "âŀ¤": 18651, "affiliate": 18652, "tow": 18653, "ln": 18654, "happynewyear": 18655, "dif": 18656, "jm": 18657, "greenwich": 18658, "controversy": 18659, "dawg": 18660, "condol": 18661, "savannah": 18662, "compensation": 18663, "touchdown": 18664, "teo": 18665, "ambitious": 18666, "embroi": 18667, "convicted": 18668, "iartg": 18669, "barack": 18670, "trance": 18671, "testimony": 18672, "audition": 18673, "thumb": 18674, "myths": 18675, "bex": 18676, "quez": 18677, "orchid": 18678, "deny": 18679, "entitled": 18680, "hood": 18681, "grant": 18682, "inbox": 18683, "bluejays": 18684, "rilla": 18685, "smallest": 18686, "burden": 18687, "infamous": 18688, "divided": 18689, "boundaries": 18690, "tter": 18691, "elt": 18692, "wyoming": 18693, "beverage": 18694, "mesm": 18695, "onews": 18696, "buddhist": 18697, "yana": 18698, "assad": 18699, "isms": 18700, "barrett": 18701, "predicted": 18702, "backto": 18703, "twit": 18704, "ethere": 18705, "captains": 18706, "escaped": 18707, "ayo": 18708, "lamborgh": 18709, "gardner": 18710, "laps": 18711, "kal": 18712, "advertisement": 18713, "insects": 18714, "napo": 18715, "amen": 18716, "acy": 18717, "rand": 18718, "gk": 18719, "teh": 18720, "kathle": 18721, "tridge": 18722, "pancake": 18723, "atro": 18724, "pyramid": 
18725, "bula": 18726, "paralym": 18727, "gauge": 18728, "encies": 18729, "tomy": 18730, "biscuit": 18731, "butcher": 18732, "qualifier": 18733, "county": 18734, "kei": 18735, "pools": 18736, "darker": 18737, "shoulders": 18738, "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 18739, "spre": 18740, "(\"": 18741, "writers": 18742, "gm": 18743, "ðŁİĵ": 18744, "knit": 18745, "huff": 18746, "mtb": 18747, "phillies": 18748, "ost": 18749, "denis": 18750, "gart": 18751, "licensed": 18752, "interface": 18753, "excel": 18754, "dwell": 18755, "fromthe": 18756, "cofficial": 18757, "azzi": 18758, "appearing": 18759, "forest": 18760, "nana": 18761, "keith": 18762, "manufacturers": 18763, "beckham": 18764, ")?": 18765, "ese": 18766, "colony": 18767, "delicate": 18768, "utter": 18769, "mcin": 18770, "transplant": 18771, "preferred": 18772, "pard": 18773, "arie": 18774, "hub": 18775, "pods": 18776, "perspectives": 18777, "pict": 18778, "delu": 18779, "apper": 18780, "bethan": 18781, "pmo": 18782, "criminals": 18783, "feminism": 18784, "shack": 18785, "circumstances": 18786, "fellas": 18787, "protesting": 18788, "wax": 18789, "suggested": 18790, "tator": 18791, "drew": 18792, "omni": 18793, "fake": 18794, "kathy": 18795, "reb": 18796, "deline": 18797, "berni": 18798, "misty": 18799, "ðŁij©": 18800, "erable": 18801, "breakthrough": 18802, "menswear": 18803, "millennials": 18804, "chanyeol": 18805, "laz": 18806, "insert": 18807, "replies": 18808, "phrase": 18809, "nx": 18810, "iheartawards": 18811, "audrey": 18812, "granite": 18813, "racec": 18814, "orie": 18815, "terra": 18816, "innovations": 18817, "brittany": 18818, "ateral": 18819, "pear": 18820, "biological": 18821, "shments": 18822, "institution": 18823, "msn": 18824, "frequency": 18825, "dman": 18826, "neglec": 18827, "tf": 18828, "stefan": 18829, "foxnews": 18830, "typo": 18831, "comms": 18832, "sequence": 18833, "carmen": 18834, "whites": 18835, "economist": 18836, "exeter": 18837, "seum": 18838, "resorts": 18839, "casually": 18840, "bunde": 18841, 
"divide": 18842, "ع": 18843, "gag": 18844, "creed": 18845, "retire": 18846, "caucus": 18847, "rapids": 18848, "wrestlemania": 18849, "tulsa": 18850, "sunderland": 18851, "fundament": 18852, "odi": 18853, "yamaha": 18854, "vary": 18855, "intrigu": 18856, "else": 18857, "beacon": 18858, "angie": 18859, "traded": 18860, "transm": 18861, "gents": 18862, "knitting": 18863, "galac": 18864, "ðĿĹ": 18865, "uto": 18866, "seaside": 18867, "holt": 18868, "rers": 18869, "fargo": 18870, "trainers": 18871, "monsoon": 18872, "bale": 18873, "sought": 18874, "maddie": 18875, "hw": 18876, "coli": 18877, "fran": 18878, "favs": 18879, "ðŁēĶ": 18880, "intent": 18881, "rally": 18882, "sbs": 18883, "lemonade": 18884, "barackobama": 18885, "bread": 18886, "sticky": 18887, "explosive": 18888, "chelten": 18889, "tj": 18890, "assoc": 18891, "ramen": 18892, "homies": 18893, "vlog": 18894, "mister": 18895, "lord": 18896, "âĢįâĻĢï¸ı": 18897, "alyssa": 18898, "sketchbook": 18899, "rumble": 18900, "catch": 18901, "migrant": 18902, "discipline": 18903, "unlikely": 18904, "chronicles": 18905, "flora": 18906, "slams": 18907, "amid": 18908, "sboro": 18909, "coop": 18910, "jumps": 18911, "tranqu": 18912, "melis": 18913, "sofia": 18914, "enri": 18915, "gabe": 18916, "syri": 18917, "nicolas": 18918, "chai": 18919, "wv": 18920, "becky": 18921, "footy": 18922, "tao": 18923, "suppose": 18924, "ðŁĺįðŁĺįðŁĺįðŁĺį": 18925, "plush": 18926, "rish": 18927, "ð٤ĵ": 18928, "kha": 18929, "saturdays": 18930, "accent": 18931, "hec": 18932, "limit": 18933, "carlton": 18934, "wired": 18935, "taylorswift": 18936, "ðŁĺij": 18937, "sql": 18938, "harro": 18939, "recipients": 18940, "gat": 18941, "gop": 18942, "thof": 18943, "amazed": 18944, "ghan": 18945, "ðŁıĨðŁıĨ": 18946, "porto": 18947, "clare": 18948, "distant": 18949, "nac": 18950, "ohio": 18951, "ðŁĻıðŁı¼": 18952, "mtn": 18953, "antibio": 18954, "dinosa": 18955, "mesa": 18956, "partial": 18957, "bv": 18958, "learnt": 18959, "lovato": 18960, "question": 18961, 
"extract": 18962, "gossip": 18963, "gibb": 18964, "niagara": 18965, "ðŁij¨": 18966, "displayed": 18967, "sooner": 18968, "stevie": 18969, "nuggets": 18970, "mln": 18971, "brom": 18972, "turb": 18973, "giveaways": 18974, "stupi": 18975, "blink": 18976, "cili": 18977, "convenient": 18978, "moh": 18979, "vive": 18980, "fric": 18981, "cause": 18982, "chamber": 18983, "cules": 18984, "nearest": 18985, "isse": 18986, "smallbiz": 18987, "tj": 18988, "canadians": 18989, "smarter": 18990, "brasil": 18991, "rare": 18992, "quette": 18993, "wha": 18994, "candle": 18995, "atomic": 18996, "ðŁijįðŁijį": 18997, "warrior": 18998, "relaxed": 18999, "strips": 19000, "neur": 19001, "kka": 19002, "rfc": 19003, "jensen": 19004, "recovering": 19005, "responses": 19006, "salam": 19007, "orthodox": 19008, "active": 19009, "ellers": 19010, "nit": 19011, "âŃIJ": 19012, "metropolitan": 19013, "centuries": 19014, "vida": 19015, "grading": 19016, "transparent": 19017, "simple": 19018, "dots": 19019, "superintendent": 19020, "elevator": 19021, "automated": 19022, "redskins": 19023, "imam": 19024, "summertime": 19025, "jonathan": 19026, "gearing": 19027, "michelle": 19028, "conflic": 19029, "mice": 19030, "tote": 19031, "publish": 19032, "pax": 19033, ")-": 19034, "nailed": 19035, "ԓ": 19036, "telescope": 19037, "serbia": 19038, "bab": 19039, "apeu": 19040, "stically": 19041, "senti": 19042, "rats": 19043, "isolated": 19044, "group": 19045, "hatred": 19046, "paranormal": 19047, "stanley": 19048, "alion": 19049, "safety": 19050, "ls": 19051, "र": 19052, "nexus": 19053, "alexandra": 19054, "masks": 19055, "++": 19056, "tron": 19057, "auk": 19058, "brotherhood": 19059, "browse": 19060, "mixes": 19061, "simone": 19062, "musk": 19063, "approve": 19064, "lola": 19065, "exp": 19066, "perth": 19067, "futuri": 19068, "unseen": 19069, "dm": 19070, "chelse": 19071, "scouting": 19072, "owe": 19073, "portsmouth": 19074, "kram": 19075, "mize": 19076, "dispen": 19077, "sup": 19078, "dlc": 19079, "advert": 
19080, "teresa": 19081, "isle": 19082, "cycle": 19083, "metall": 19084, "shields": 19085, "mariners": 19086, "raz": 19087, "ingen": 19088, "fund": 19089, "ango": 19090, "jones": 19091, "oka": 19092, "madden": 19093, "broccoli": 19094, "dominic": 19095, "situations": 19096, "mero": 19097, "cricke": 19098, "punishment": 19099, "db": 19100, "shaking": 19101, "ðŁĺļ": 19102, "mq": 19103, "arians": 19104, "leh": 19105, "claw": 19106, "weds": 19107, "dure": 19108, "niel": 19109, "jelly": 19110, "gourmet": 19111, "traders": 19112, "levi": 19113, "wages": 19114, "knees": 19115, "wise": 19116, "heavenly": 19117, "avid": 19118, "melody": 19119, "zack": 19120, "bananas": 19121, "apprentice": 19122, "prop": 19123, "funny": 19124, "ode": 19125, "respected": 19126, "megan": 19127, "fewer": 19128, "drafted": 19129, "medit": 19130, "grape": 19131, "usarmy": 19132, "crusad": 19133, "vocali": 19134, "preparations": 19135, "nonsense": 19136, "usage": 19137, "thr": 19138, "roth": 19139, "wizards": 19140, "inside": 19141, "promotions": 19142, "mona": 19143, "redsox": 19144, "sig": 19145, "elegance": 19146, "chia": 19147, "universal": 19148, "ãĢį": 19149, "raja": 19150, "unga": 19151, "pollin": 19152, "filipino": 19153, "aka": 19154, "tsun": 19155, "ikon": 19156, "biking": 19157, "decorations": 19158, "zac": 19159, "cadets": 19160, "humour": 19161, "agm": 19162, "reppin": 19163, "vaccin": 19164, "elove": 19165, "uw": 19166, "diabe": 19167, "gallagher": 19168, "azer": 19169, "dol": 19170, "awhile": 19171, "prominent": 19172, "welsh": 19173, "tann": 19174, "')": 19175, "bien": 19176, "wag": 19177, "inal": 19178, "cwc": 19179, "wicket": 19180, "urst": 19181, "qanon": 19182, "xe": 19183, "outdoor": 19184, "dunn": 19185, "starr": 19186, "cology": 19187, "ricky": 19188, "uefa": 19189, "rebounds": 19190, "smusic": 19191, "infant": 19192, "ðŁĻĭ": 19193, "sop": 19194, "umber": 19195, "handing": 19196, "begin": 19197, "sorting": 19198, "hash": 19199, "spati": 19200, "rek": 19201, "budapest": 
19202, "blackhawks": 19203, "delete": 19204, "rom": 19205, "candid": 19206, "authori": 19207, "debris": 19208, "specul": 19209, "intersection": 19210, "marriott": 19211, "imran": 19212, "ðŁĺģðŁĺģ": 19213, "cruises": 19214, "ramsey": 19215, "rafael": 19216, "awareness": 19217, "vascular": 19218, "beyoncé": 19219, "rug": 19220, "ðŁĺĮ": 19221, "festiv": 19222, "aram": 19223, "sable": 19224, "basil": 19225, "pill": 19226, "flooring": 19227, "unbeaten": 19228, "implications": 19229, "uf": 19230, "wound": 19231, "forge": 19232, "pointing": 19233, "pots": 19234, "popularity": 19235, "ðŁijıðŁı»": 19236, "manipul": 19237, "slots": 19238, "debates": 19239, "absence": 19240, "vermont": 19241, "neverforget": 19242, "wrist": 19243, "gloria": 19244, "rence": 19245, "husk": 19246, "melting": 19247, "ðŁİŁ": 19248, "braces": 19249, "timely": 19250, "transforming": 19251, "amps": 19252, "mak": 19253, "poe": 19254, "ahan": 19255, "generally": 19256, "ndp": 19257, "aleppo": 19258, "unicef": 19259, "profs": 19260, "nord": 19261, "mask": 19262, "jacksonville": 19263, "vv": 19264, "shells": 19265, "blooming": 19266, "operators": 19267, "charcoal": 19268, "neville": 19269, "magi": 19270, "chip": 19271, "sama": 19272, "iran": 19273, "reforms": 19274, "accumul": 19275, "rue": 19276, "æľ": 19277, "websites": 19278, "gaon": 19279, "devastating": 19280, "stos": 19281, "glacier": 19282, "rapp": 19283, "chipotle": 19284, "pra": 19285, "orous": 19286, "romney": 19287, "season": 19288, "decorative": 19289, "cisco": 19290, "ditch": 19291, "complain": 19292, "llo": 19293, "assume": 19294, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 19295, "nels": 19296, "centric": 19297, "ftw": 19298, "carrots": 19299, "tata": 19300, "canter": 19301, "perience": 19302, "liers": 19303, "demos": 19304, "blunt": 19305, "operate": 19306, "reservations": 19307, "leah": 19308, "substance": 19309, "dison": 19310, "ante": 19311, "election": 19312, "vue": 19313, "square": 19314, "nonprofit": 19315, "caa": 19316, "fsu": 19317, "yam": 19318, 
"ãĤ¤": 19319, "vladi": 19320, "completes": 19321, "mari": 19322, "phillip": 19323, "neill": 19324, "eras": 19325, "kait": 19326, "mendo": 19327, "maharashtra": 19328, "gp": 19329, "dane": 19330, "providence": 19331, "therapeu": 19332, "juvenile": 19333, "memo": 19334, "incorpor": 19335, "aaaa": 19336, "seventeen": 19337, "teenager": 19338, "ã": 19339, "orns": 19340, "wide": 19341, "cuteness": 19342, "twd": 19343, "ffles": 19344, "bara": 19345, "comedy": 19346, "overtime": 19347, "yaz": 19348, "baron": 19349, "unemployment": 19350, "ðŁijĭ": 19351, "exterior": 19352, "dense": 19353, "centres": 19354, "matchup": 19355, "historymonth": 19356, "artificial": 19357, "quit": 19358, "esk": 19359, "warn": 19360, "critic": 19361, "jaf": 19362, "ðŁĵ²": 19363, "informative": 19364, "fuels": 19365, "recycle": 19366, "naming": 19367, "stripe": 19368, "solic": 19369, "molecular": 19370, "deepi": 19371, "convo": 19372, "ssel": 19373, "nae": 19374, "descent": 19375, "tiz": 19376, "accountability": 19377, "terry": 19378, "rito": 19379, "slay": 19380, "emo": 19381, "demol": 19382, "sensation": 19383, "cov": 19384, "tore": 19385, "roundtable": 19386, "yol": 19387, "excuses": 19388, "à„į": 19389, "turquo": 19390, "hhhh": 19391, "podcasts": 19392, "celeb": 19393, "messi": 19394, "lio": 19395, "mann": 19396, "contributed": 19397, "uz": 19398, "generator": 19399, "elets": 19400, "veggie": 19401, "indul": 19402, "ensuring": 19403, "detroit": 19404, "punjab": 19405, "transpor": 19406, "instruction": 19407, "add": 19408, "porcel": 19409, "paneli": 19410, "circles": 19411, "persist": 19412, "clayton": 19413, "spn": 19414, "dogsoftwitter": 19415, "isnt": 19416, "spr": 19417, "retailers": 19418, "pw": 19419, "hungar": 19420, "elena": 19421, "monaster": 19422, "guatem": 19423, "jessie": 19424, "anz": 19425, "rashi": 19426, "flee": 19427, "carving": 19428, "faux": 19429, "lal": 19430, "henri": 19431, "djo": 19432, "dull": 19433, "sana": 19434, "lara": 19435, "globe": 19436, "crimson": 19437, 
"compass": 19438, "pause": 19439, "nab": 19440, "lionel": 19441, "baths": 19442, "ufo": 19443, "inventory": 19444, "singh": 19445, "satan": 19446, "ðŁĩ¸": 19447, "cements": 19448, "inform": 19449, "generated": 19450, "biden": 19451, "avg": 19452, "tasks": 19453, "deer": 19454, "sau": 19455, "jailed": 19456, "pastel": 19457, "scc": 19458, "nail": 19459, "steele": 19460, "peris": 19461, "lamborghini": 19462, "pursue": 19463, "margin": 19464, "uch": 19465, "bosch": 19466, "drain": 19467, "clara": 19468, "bom": 19469, "latino": 19470, "webster": 19471, "rosemary": 19472, "rha": 19473, "soun": 19474, "billionaire": 19475, "notch": 19476, "percentage": 19477, "conor": 19478, "'\"": 19479, "homes": 19480, "earthday": 19481, "hort": 19482, "biggest": 19483, "disin": 19484, "walton": 19485, "editors": 19486, "imma": 19487, "omar": 19488, "equivalent": 19489, "pharmaceu": 19490, "ahmed": 19491, "cameo": 19492, "hanni": 19493, "underrated": 19494, "gement": 19495, "microbi": 19496, "voo": 19497, "honorable": 19498, "obesity": 19499, "âļ”ï¸ı": 19500, "limerick": 19501, "involvement": 19502, "stagram": 19503, "boulevard": 19504, "burg": 19505, "blackandwhite": 19506, "liberation": 19507, "five": 19508, "interim": 19509, "smm": 19510, "rivalry": 19511, "capabilities": 19512, "statements": 19513, "thumb": 19514, "ved": 19515, "swans": 19516, "barber": 19517, "eque": 19518, "serena": 19519, "helm": 19520, "noodle": 19521, "sampling": 19522, "nawaz": 19523, "single": 19524, "thunderstorms": 19525, "shon": 19526, "inev": 19527, "ë¯": 19528, "topp": 19529, "orchard": 19530, "bian": 19531, "ðŁĺĶ": 19532, "doorstep": 19533, "salvation": 19534, "marketing": 19535, "rons": 19536, "clemson": 19537, "ravi": 19538, "intake": 19539, "standwith": 19540, "sina": 19541, "haiku": 19542, "pley": 19543, "electoral": 19544, "philly": 19545, "lays": 19546, "electric": 19547, "capturing": 19548, "upp": 19549, "ergy": 19550, "believing": 19551, "cultures": 19552, "esday": 19553, "invasive": 19554, 
"eded": 19555, "speech": 19556, "endur": 19557, "vietnam": 19558, "boycott": 19559, "pede": 19560, "deliver": 19561, "ðŁēĸðŁēĸ": 19562, "merchant": 19563, "stir": 19564, "denies": 19565, "pockets": 19566, "oti": 19567, "cuddle": 19568, "roland": 19569, "mmed": 19570, "dened": 19571, "learners": 19572, "hoop": 19573, "sourcing": 19574, "hacked": 19575, "dim": 19576, "environments": 19577, "benson": 19578, "judicial": 19579, "worcester": 19580, "pearls": 19581, "governments": 19582, "arrivals": 19583, "corners": 19584, "tuning": 19585, "labour": 19586, "ym": 19587, "ordering": 19588, "lewi": 19589, "ife": 19590, "hygiene": 19591, "thoughtful": 19592, "indonesian": 19593, "campaigning": 19594, "principle": 19595, "assaul": 19596, "rubb": 19597, "atv": 19598, "willy": 19599, "entre": 19600, "ili": 19601, "phon": 19602, "duties": 19603, "âĻ„âĻ„": 19604, "snakes": 19605, "loop": 19606, "amar": 19607, "convertible": 19608, "bonding": 19609, "mentoring": 19610, "maxwell": 19611, "ethereum": 19612, "destroying": 19613, "axis": 19614, "cairo": 19615, "finnish": 19616, "shock": 19617, "ðŁĺIJ": 19618, "caleb": 19619, "coma": 19620, "pedal": 19621, "core": 19622, "continent": 19623, "elson": 19624, "tempo": 19625, "helsinki": 19626, "acp": 19627, "tackling": 19628, "stated": 19629, "bla": 19630, "doub": 19631, "smashing": 19632, "aja": 19633, "cameron": 19634, "disruption": 19635, "warmth": 19636, "beingsalmankhan": 19637, "bulletin": 19638, "ode": 19639, "syracuse": 19640, "aran": 19641, "mcgregor": 19642, "bulk": 19643, "anton": 19644, "confirmation": 19645, "spine": 19646, "imran": 19647, "instruc": 19648, "jacks": 19649, "chio": 19650, "palm": 19651, "stre": 19652, "embarrassing": 19653, "unt": 19654, "eliminate": 19655, "toss": 19656, "cise": 19657, "aws": 19658, "onists": 19659, "shinee": 19660, "jos": 19661, "hose": 19662, "lively": 19663, "opponents": 19664, "movements": 19665, "recognizing": 19666, "sandwiches": 19667, "shakes": 19668, "exercises": 19669, "seat": 
19670, "profession": 19671, "merrychristmas": 19672, "lugg": 19673, "adoptdont": 19674, "marvin": 19675, "byrne": 19676, "unle": 19677, "het": 19678, "kuwait": 19679, "rahman": 19680, "aspect": 19681, "humbled": 19682, "genes": 19683, "fand": 19684, "longtime": 19685, ");": 19686, "campu": 19687, "angus": 19688, "ðŁijįðŁı¼": 19689, "quran": 19690, "sleeves": 19691, "slic": 19692, "ĀøĆ«": 19693, "twelve": 19694, "youre": 19695, "ike": 19696, "gogh": 19697, "bst": 19698, "dictionary": 19699, "reflecting": 19700, "toon": 19701, "yarn": 19702, "embed": 19703, "ðŁı“": 19704, "reserves": 19705, "flooded": 19706, "veriz": 19707, "dusk": 19708, "establish": 19709, "proli": 19710, "aud": 19711, "ritual": 19712, "orbit": 19713, "declaration": 19714, "recordings": 19715, "camo": 19716, "cassette": 19717, "goodluck": 19718, "cutter": 19719, "bop": 19720, "bho": 19721, "cheating": 19722, "pacific": 19723, "mares": 19724, "timer": 19725, "colt": 19726, "trous": 19727, "tomorrow": 19728, "hansen": 19729, "cie": 19730, "wang": 19731, "bani": 19732, "circular": 19733, "acute": 19734, "farmer": 19735, "coys": 19736, "pse": 19737, "irving": 19738, "wj": 19739, "hawkins": 19740, "bison": 19741, "urday": 19742, "cruising": 19743, "ote": 19744, "kath": 19745, "whistle": 19746, "yourselves": 19747, "antis": 19748, "slash": 19749, "thoroughly": 19750, "kesh": 19751, "serie": 19752, "exem": 19753, "enig": 19754, "guild": 19755, "shred": 19756, "hogan": 19757, "apo": 19758, "ä¸": 19759, "puzz": 19760, "netball": 19761, "aussi": 19762, "panorama": 19763, "wsj": 19764, "avis": 19765, "arming": 19766, "humph": 19767, "browser": 19768, "cries": 19769, "foggy": 19770, "matte": 19771, "ðŁĮ»": 19772, "iter": 19773, "tallest": 19774, "byron": 19775, "captiv": 19776, "jesu": 19777, "anyways": 19778, "flagship": 19779, "pton": 19780, "wey": 19781, "fayette": 19782, "financial": 19783, "foul": 19784, "solomon": 19785, "jennifer": 19786, "cucumber": 19787, "argue": 19788, "textile": 19789, "wrestler": 
19790, "johnston": 19791, "pastor": 19792, "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 19793, "cactus": 19794, "edible": 19795, "reserved": 19796, "richie": 19797, "metres": 19798, "ingredient": 19799, "hella": 19800, "unto": 19801, "chol": 19802, "celebs": 19803, "poets": 19804, "graham": 19805, "hayden": 19806, "coincidence": 19807, "baw": 19808, "communicate": 19809, "fletcher": 19810, "/-": 19811, "toledo": 19812, "ecuador": 19813, "counsel": 19814, "slaughter": 19815, "linear": 19816, "atp": 19817, "osu": 19818, "joel": 19819, "eved": 19820, "conquer": 19821, "rustic": 19822, "plicity": 19823, "recognise": 19824, "roommate": 19825, "cracked": 19826, "jasper": 19827, "pher": 19828, "ðŁĮº": 19829, "woven": 19830, "moist": 19831, "ffc": 19832, "steering": 19833, "nish": 19834, "standings": 19835, "frequent": 19836, "ardi": 19837, "hazel": 19838, "asmsg": 19839, "baum": 19840, "dart": 19841, "sidd": 19842, "nath": 19843, "chero": 19844, "cardboard": 19845, "css": 19846, "nsfw": 19847, "pair": 19848, "ðŁĺįðŁĺĺ": 19849, "occurred": 19850, "homelessness": 19851, "malone": 19852, "phe": 19853, "xia": 19854, "paddy": 19855, "declare": 19856, "theatre": 19857, "bf": 19858, "persian": 19859, "tad": 19860, "axe": 19861, "suspicious": 19862, "lamb": 19863, "mucho": 19864, "senior": 19865, "stas": 19866, "kite": 19867, "sting": 19868, "grad": 19869, "kaf": 19870, "watering": 19871, "د": 19872, "spiral": 19873, "thms": 19874, "educator": 19875, "jerome": 19876, "ofc": 19877, "clock": 19878, "sul": 19879, "pemb": 19880, ".........": 19881, "parkway": 19882, "deaux": 19883, "restrictions": 19884, "mons": 19885, "needle": 19886, "ej": 19887, "leagues": 19888, "watermelon": 19889, "aman": 19890, "plenary": 19891, "maxim": 19892, "wab": 19893, "comingsoon": 19894, "bryce": 19895, "vigil": 19896, "supermarket": 19897, "fortunate": 19898, "turquoise": 19899, "president": 19900, "liv": 19901, "interns": 19902, "feelin": 19903, "fixtures": 19904, "stunt": 19905, "staged": 19906, "premieres": 19907, "lok": 
19908, "practiti": 19909, "shortage": 19910, "logne": 19911, "vec": 19912, "concor": 19913, "rocke": 19914, "lig": 19915, "composed": 19916, "synthetic": 19917, "dip": 19918, "camila": 19919, "chis": 19920, "jou": 19921, "susan": 19922, "eyebrows": 19923, "supplement": 19924, "satisfaction": 19925, "mohammad": 19926, "tibet": 19927, "houseof": 19928, "pun": 19929, "assam": 19930, "shadowhun": 19931, "psyched": 19932, "seduc": 19933, "mandatory": 19934, "herbert": 19935, "scallo": 19936, "streamers": 19937, "protocol": 19938, "blockbuster": 19939, "produces": 19940, "schnei": 19941, "laurel": 19942, "tribe": 19943, "timehop": 19944, "pla": 19945, "modelling": 19946, "tvtime": 19947, "mtvstars": 19948, "widow": 19949, "metric": 19950, "cham": 19951, "condo": 19952, "flowering": 19953, "alec": 19954, "dms": 19955, "intensity": 19956, "¨": 19957, "mccartney": 19958, "islamabad": 19959, "kb": 19960, "ffi": 19961, "phal": 19962, "analog": 19963, "fond": 19964, "hacks": 19965, "positivity": 19966, "treaty": 19967, "submarine": 19968, "connect": 19969, "selen": 19970, "categories": 19971, "cub": 19972, "organize": 19973, "sik": 19974, "quoteoftheday": 19975, "reminding": 19976, "amor": 19977, "locking": 19978, "ðŁijıðŁı¼": 19979, "compound": 19980, "ette": 19981, "bout": 19982, "recur": 19983, "ference": 19984, "mizz": 19985, "trend": 19986, "hipster": 19987, "fortress": 19988, "forthcoming": 19989, "prelimin": 19990, "odyssey": 19991, "angp": 19992, "delici": 19993, "evenings": 19994, "ðŁĶ¹": 19995, "iq": 19996, "dw": 19997, "dair": 19998, "kathryn": 19999, "christianity": 20000, "moonlight": 20001, "hab": 20002, "whoo": 20003, "fbf": 20004, "seth": 20005, "genuinely": 20006, "pax": 20007, "charity": 20008, "deployed": 20009, "bnb": 20010, "bucs": 20011, "judg": 20012, "conge": 20013, "plantation": 20014, "impress": 20015, "cara": 20016, "sclub": 20017, "scopy": 20018, "landers": 20019, "complaints": 20020, "bama": 20021, "rebuild": 20022, "xy": 20023, "realism": 20024, 
"shour": 20025, "lein": 20026, "bracelets": 20027, "mera": 20028, "assassin": 20029, "anchor": 20030, "ðŁijĮðŁı¼": 20031, "linen": 20032, "confron": 20033, "chronicle": 20034, "comment": 20035, "catalog": 20036, "illes": 20037, "gorge": 20038, "metry": 20039, "jungkook": 20040, "lovemy": 20041, "sentin": 20042, "seem": 20043, "fitness": 20044, "allied": 20045, "tsman": 20046, "digitaltransformation": 20047, "pran": 20048, "loft": 20049, "minton": 20050, "aldenrichards": 20051, "envel": 20052, "cherish": 20053, "certainty": 20054, "zzz": 20055, "rhino": 20056, "perkins": 20057, "enrich": 20058, "capetown": 20059, "ometer": 20060, "sections": 20061, "skeleton": 20062, "defenders": 20063, "ðŁĺĿ": 20064, "penc": 20065, "brit": 20066, "jah": 20067, "capitalism": 20068, "ðŁ„ĩ": 20069, "bazaar": 20070, "reme": 20071, "ext": 20072, "kkk": 20073, "convert": 20074, "stormy": 20075, "bye": 20076, "karan": 20077, "chrysler": 20078, "ados": 20079, "pressed": 20080, "sync": 20081, "ationday": 20082, "danger": 20083, "badges": 20084, "refuses": 20085, "empowering": 20086, "lym": 20087, "exports": 20088, "adoptdontshop": 20089, "ðŁĩ¯": 20090, "thc": 20091, "awaited": 20092, "focuses": 20093, "fined": 20094, "oat": 20095, "hahahah": 20096, "âģ©": 20097, "nfamily": 20098, "fiona": 20099, "luckily": 20100, "thrilling": 20101, "typing": 20102, "outbreak": 20103, "dies": 20104, "heu": 20105, "crawl": 20106, "nesses": 20107, "oath": 20108, "scripts": 20109, "geeks": 20110, "ðŁIJĿ": 20111, "pb": 20112, "mathematics": 20113, "alis": 20114, "________________": 20115, "gymnastics": 20116, "activism": 20117, "recommendation": 20118, "gren": 20119, "wain": 20120, "courty": 20121, "napol": 20122, "cauli": 20123, "hornets": 20124, "gals": 20125, "jockey": 20126, "dirty": 20127, "atar": 20128, "enormous": 20129, "pest": 20130, "gregation": 20131, "anos": 20132, "iiii": 20133, "defends": 20134, "blackhistorymonth": 20135, "atx": 20136, "mbc": 20137, "luggage": 20138, "witch": 20139, "cob": 20140, 
"lasts": 20141, "cum": 20142, "ggg": 20143, "bathing": 20144, "nar": 20145, "cebu": 20146, "ðŁįĄ": 20147, "navigation": 20148, "mine": 20149, "rejo": 20150, "ðŁİĢ": 20151, "giftide": 20152, "reta": 20153, "useless": 20154, "pull": 20155, "deficit": 20156, "allu": 20157, "atime": 20158, "itv": 20159, "trillion": 20160, "pue": 20161, "acies": 20162, "procedure": 20163, "lori": 20164, "jenny": 20165, "cad": 20166, "ulously": 20167, "drac": 20168, "promotes": 20169, "ingthe": 20170, "canu": 20171, "woohoo": 20172, "naomi": 20173, "zardari": 20174, "tsu": 20175, "beir": 20176, "sdg": 20177, "lever": 20178, "weber": 20179, "abud": 20180, "lund": 20181, "crowded": 20182, "deployment": 20183, "terrain": 20184, "kenny": 20185, "hof": 20186, "witnessed": 20187, "loch": 20188, "jk": 20189, "bully": 20190, "wren": 20191, "poetry": 20192, "doff": 20193, "wwi": 20194, "mored": 20195, "dini": 20196, "culture": 20197, "prompt": 20198, "„": 20199, "maurice": 20200, "topps": 20201, "rm": 20202, "correspon": 20203, "about": 20204, "jewels": 20205, "gibr": 20206, "eagle": 20207, "ðŁĺĺðŁĺĺðŁĺĺ": 20208, "lending": 20209, "souven": 20210, "ƧĶ": 20211, "contemporaryart": 20212, "establishment": 20213, "jong": 20214, "â̦\"": 20215, "gator": 20216, "patriotic": 20217, "mccoy": 20218, "vape": 20219, "humane": 20220, "feliz": 20221, "coachella": 20222, "reposting": 20223, "steals": 20224, "fuller": 20225, "nering": 20226, "atra": 20227, "(-": 20228, "blake": 20229, "heather": 20230, "worms": 20231, "disciplinary": 20232, "redemption": 20233, "yard": 20234, "amin": 20235, "\"@_": 20236, "dnc": 20237, "tds": 20238, "kappa": 20239, "newark": 20240, "commits": 20241, "spears": 20242, "jams": 20243, "tand": 20244, "msnbc": 20245, "intermedi": 20246, "aimed": 20247, "atic": 20248, "teenth": 20249, "observation": 20250, "kashmir": 20251, "kavanaugh": 20252, "oul": 20253, "sanfrancisco": 20254, "reu": 20255, "belated": 20256, "chow": 20257, "password": 20258, "stills": 20259, "detained": 20260, 
"sari": 20261, "dayton": 20262, "darren": 20263, "italian": 20264, "arth": 20265, "amusic": 20266, "arbit": 20267, "wm": 20268, "vm": 20269, "hem": 20270, "doug": 20271, "myr": 20272, "asho": 20273, "prev": 20274, "vind": 20275, "brah": 20276, "stag": 20277, "ี": 20278, "previews": 20279, "guk": 20280, "containing": 20281, "leonardo": 20282, "saddle": 20283, "rushing": 20284, "stav": 20285, "longh": 20286, "gambling": 20287, "vegas": 20288, "reservation": 20289, "endale": 20290, "bala": 20291, "fla": 20292, "variant": 20293, "hedge": 20294, "bulgaria": 20295, "natali": 20296, "weaver": 20297, "solst": 20298, "encouraged": 20299, "apc": 20300, "asparag": 20301, "nest": 20302, "cyclists": 20303, "fel": 20304, "ìĬ¤": 20305, "overwhelming": 20306, "peyton": 20307, "jit": 20308, "apost": 20309, "mble": 20310, "bleeding": 20311, "neighbourhood": 20312, "avery": 20313, "expressions": 20314, "macdonald": 20315, "gigs": 20316, "monds": 20317, "illusion": 20318, "nct": 20319, "camero": 20320, "overhead": 20321, "myth": 20322, "oly": 20323, "vio": 20324, "etv": 20325, "laurie": 20326, "unveiling": 20327, "prior": 20328, "conn": 20329, "ironman": 20330, "diff": 20331, "dayin": 20332, "critici": 20333, "congo": 20334, "revision": 20335, "wale": 20336, "director": 20337, "pines": 20338, "blackpink": 20339, "garner": 20340, "curated": 20341, "manitoba": 20342, "hac": 20343, "commonly": 20344, "barton": 20345, "....#": 20346, "mortality": 20347, "livesmatter": 20348, "philosop": 20349, "shorter": 20350, "convince": 20351, "freak": 20352, "vendors": 20353, "insightful": 20354, "elly": 20355, "sensors": 20356, "eled": 20357, "sberg": 20358, "weightloss": 20359, "ukip": 20360, "spur": 20361, "private": 20362, "qua": 20363, "ssc": 20364, ",...": 20365, "supervisor": 20366, "adviser": 20367, "amazingly": 20368, "lesser": 20369, "ates": 20370, "mahon": 20371, "oooooo": 20372, "saras": 20373, "pmoindia": 20374, "waffle": 20375, "unders": 20376, "tolerance": 20377, "sculptures": 20378, 
"hersh": 20379, "knocking": 20380, "smoke": 20381, "catholic": 20382, "grim": 20383, "traveled": 20384, "flip": 20385, "geoff": 20386, "dinosaurs": 20387, "slept": 20388, "scarlet": 20389, "oki": 20390, "complaint": 20391, "obsc": 20392, "nami": 20393, "lag": 20394, "crossfit": 20395, "ufc": 20396, "mccain": 20397, "referee": 20398, "sadness": 20399, "penny": 20400, "lieu": 20401, "mode": 20402, "kier": 20403, "vols": 20404, "wis": 20405, "elon": 20406, "shea": 20407, "bao": 20408, "sonia": 20409, "claire": 20410, "emmanuel": 20411, "moisture": 20412, "digest": 20413, "viii": 20414, "teller": 20415, "chon": 20416, "accessory": 20417, "nightclub": 20418, "fossil": 20419, "awan": 20420, "husky": 20421, "aboriginal": 20422, "brandon": 20423, "fficient": 20424, "cougars": 20425, "sted": 20426, "admitted": 20427, "ignored": 20428, "contentmarketing": 20429, "agas": 20430, "vase": 20431, "executed": 20432, "negotiations": 20433, "shead": 20434, "nand": 20435, "tablets": 20436, "goth": 20437, "tsal": 20438, "dfw": 20439, "onep": 20440, "protector": 20441, "spho": 20442, "gazette": 20443, "andreas": 20444, "sser": 20445, "compilation": 20446, "hav": 20447, "containers": 20448, "broker": 20449, "socal": 20450, "porcelain": 20451, "hyuk": 20452, "airing": 20453, "ðŁē°": 20454, "publisher": 20455, "scenario": 20456, "spartans": 20457, "reviewing": 20458, "itudes": 20459, "edel": 20460, "pearson": 20461, "bash": 20462, "maui": 20463, "aad": 20464, "ðŁĮĬ": 20465, "liu": 20466, "ulate": 20467, "programmes": 20468, "favour": 20469, "webdesign": 20470, "realty": 20471, "motivational": 20472, "crosses": 20473, "'...": 20474, "busch": 20475, "adjustable": 20476, "arjun": 20477, "mistak": 20478, "dimension": 20479, "pistol": 20480, "weighs": 20481, "eny": 20482, "unveil": 20483, "indycar": 20484, "gordon": 20485, "fade": 20486, "franken": 20487, "qualities": 20488, "bett": 20489, "locate": 20490, "kerr": 20491, "spc": 20492, "confusion": 20493, "nee": 20494, "lucky": 20495, "bases": 
20496, "depends": 20497, "firefighter": 20498, "ola": 20499, "ret": 20500, "maroon": 20501, "ðŁĶĬ": 20502, "wam": 20503, "defining": 20504, "wheat": 20505, "bil": 20506, "és": 20507, "bhai": 20508, "psych": 20509, "tau": 20510, "icans": 20511, "thik": 20512, "obile": 20513, "inspector": 20514, "ìĨĮë": 20515, "illon": 20516, "gos": 20517, "evangel": 20518, "fai": 20519, "sist": 20520, "vocation": 20521, "burge": 20522, "chistan": 20523, "renewed": 20524, "enthusiasm": 20525, "enting": 20526, "agri": 20527, "ikea": 20528, "msc": 20529, "aerospace": 20530, "sensiti": 20531, "memoir": 20532, "hospice": 20533, "cocaine": 20534, "derry": 20535, "mechanics": 20536, "Ħà¸": 20537, "tino": 20538, "reduces": 20539, "collectors": 20540, "injustice": 20541, "suppre": 20542, "vana": 20543, "abun": 20544, "napa": 20545, "susa": 20546, "oslo": 20547, "eff": 20548, "encore": 20549, "licence": 20550, "cheddar": 20551, "zal": 20552, "mount": 20553, "ðŁēIJ": 20554, "threatens": 20555, "!!\"": 20556, "archie": 20557, "futsal": 20558, "scuba": 20559, "jos": 20560, "gnon": 20561, "sexi": 20562, "sofficial": 20563, "comparing": 20564, "dominant": 20565, "toftheday": 20566, "fait": 20567, "proposals": 20568, "gift": 20569, "yas": 20570, "cnc": 20571, "lr": 20572, "hab": 20573, "reservoir": 20574, "beliefs": 20575, "general": 20576, "marti": 20577, "td": 20578, "este": 20579, "ìł": 20580, "wil": 20581, "ðŁij¯": 20582, "ðŁĶ«": 20583, "spx": 20584, "etwork": 20585, "excerpt": 20586, "einstein": 20587, "hiro": 20588, "silhou": 20589, "teamed": 20590, "perception": 20591, "corridor": 20592, "mentalhealth": 20593, "hints": 20594, "benny": 20595, "inducted": 20596, "swx": 20597, "widesp": 20598, "speak": 20599, "cheryl": 20600, "drug": 20601, "ðŁĺķ": 20602, "hf": 20603, "asparagus": 20604, "mysteries": 20605, "fitzgerald": 20606, "offer": 20607, "therapist": 20608, "career": 20609, "damaging": 20610, "tsd": 20611, "peru": 20612, "weibo": 20613, "yay": 20614, "phoenix": 20615, "discre": 20616, 
"macbook": 20617, "barker": 20618, "stigma": 20619, "spread": 20620, "rockies": 20621, "kangar": 20622, "bridg": 20623, "pai": 20624, "bishop": 20625, "tailed": 20626, "capsule": 20627, "ðŁēĵ": 20628, "geof": 20629, "royale": 20630, "shortlisted": 20631, "oste": 20632, "ashamed": 20633, "chapp": 20634, "keye": 20635, "cla": 20636, "screenshot": 20637, "austrian": 20638, "native": 20639, "enight": 20640, "juliet": 20641, "michele": 20642, "ðŁĮ“": 20643, "travelers": 20644, "pil": 20645, "footballer": 20646, "winchester": 20647, "ðŁĻĦ": 20648, "azerbai": 20649, "goldeng": 20650, "organisations": 20651, "interpretation": 20652, "predator": 20653, "oftheweek": 20654, "logan": 20655, "poké": 20656, "marie": 20657, "calla": 20658, "tnt": 20659, "cinde": 20660, "getic": 20661, "fitfam": 20662, "grav": 20663, "owens": 20664, "ðŁĮ±": 20665, "shootout": 20666, "salis": 20667, "commissions": 20668, "cohe": 20669, "ptic": 20670, "nixon": 20671, "hia": 20672, "ambition": 20673, "marine": 20674, "cruelty": 20675, "tk": 20676, "crude": 20677, "salty": 20678, "jima": 20679, "mongo": 20680, "irony": 20681, "onwards": 20682, "arrests": 20683, "strangers": 20684, "iger": 20685, "cyclist": 20686, "rag": 20687, "extends": 20688, "tradio": 20689, "bourg": 20690, "moi": 20691, "ella": 20692, "eable": 20693, "lexus": 20694, "aul": 20695, "dera": 20696, "historian": 20697, "morton": 20698, "tiff": 20699, "manner": 20700, "kot": 20701, "dk": 20702, "pointed": 20703, "marqu": 20704, "aan": 20705, "eney": 20706, "dublin": 20707, "onpoli": 20708, "emili": 20709, "secret": 20710, "flo": 20711, "âļ”": 20712, "baj": 20713, "steep": 20714, "accompanied": 20715, "rumours": 20716, "devi": 20717, "purchasing": 20718, "fig": 20719, "pub": 20720, "schoo": 20721, "autonomous": 20722, "goalie": 20723, "xia": 20724, "automatically": 20725, "revers": 20726, "tero": 20727, "fuku": 20728, "titanic": 20729, "shook": 20730, "sandals": 20731, "seekers": 20732, "excav": 20733, "nordic": 20734, "bigolive": 20735, 
"bake": 20736, "ratt": 20737, "zak": 20738, "nep": 20739, "ðŁĺ¤": 20740, "candy": 20741, "billions": 20742, "bookworm": 20743, "ppet": 20744, "à³": 20745, "surfaces": 20746, "scars": 20747, "philip": 20748, "dogg": 20749, "cigars": 20750, "cote": 20751, "translated": 20752, "curator": 20753, "sindh": 20754, "hangover": 20755, "brewer": 20756, "ones": 20757, "elton": 20758, "ðŁēªðŁı¼": 20759, "marcu": 20760, "elliot": 20761, "righte": 20762, "dioce": 20763, "russ": 20764, "railways": 20765, "grandson": 20766, "ascen": 20767, "apology": 20768, "await": 20769, "mobili": 20770, "respir": 20771, "partisan": 20772, "olivi": 20773, "strike": 20774, "yoo": 20775, "whitehouse": 20776, "expressed": 20777, "pups": 20778, "bedford": 20779, "cultur": 20780, "frogs": 20781, "flying": 20782, "cavali": 20783, "cds": 20784, "friger": 20785, "streetphotography": 20786, "resolve": 20787, "taliban": 20788, "kang": 20789, "crushing": 20790, "jum": 20791, "ðŁĺē": 20792, "williamson": 20793, "tang": 20794, "curly": 20795, "tman": 20796, "veteran": 20797, "faire": 20798, "artificialintelligence": 20799, "unanim": 20800, "pren": 20801, "backdrop": 20802, "frances": 20803, "occer": 20804, "dorothy": 20805, "working": 20806, "arthr": 20807, "converted": 20808, "daylight": 20809, "servant": 20810, "paddle": 20811, "complaining": 20812, "thirty": 20813, "nadal": 20814, "aku": 20815, "ibrahim": 20816, "addressed": 20817, "piss": 20818, "greenhouse": 20819, "battalion": 20820, "simulator": 20821, "outlets": 20822, "embroidery": 20823, "ðŁĵ±": 20824, "fiscal": 20825, "gerard": 20826, "sassy": 20827, "ðŁİīðŁİīðŁİī": 20828, "ventures": 20829, "merit": 20830, "publicity": 20831, "ðŁijĪ": 20832, "sophisticated": 20833, "ctu": 20834, "conventional": 20835, "condolences": 20836, "israel": 20837, "tradition": 20838, "aran": 20839, "tess": 20840, "glad": 20841, "ðŁĺĬðŁĺĬ": 20842, "correction": 20843, "geon": 20844, "amd": 20845, "orship": 20846, "beast": 20847, "chment": 20848, "ìŀ": 20849, "nico": 
20850, "wknd": 20851, "wels": 20852, "cushion": 20853, "belie": 20854, "voc": 20855, "idiots": 20856, "underneath": 20857, "puma": 20858, "cornell": 20859, "enation": 20860, "lul": 20861, "swach": 20862, "abig": 20863, "urer": 20864, "mie": 20865, "formerly": 20866, "caf": 20867, "ernal": 20868, "chorus": 20869, "julius": 20870, "senator": 20871, "âľį": 20872, "whir": 20873, "salvador": 20874, "phd": 20875, "unified": 20876, "booster": 20877, "graphical": 20878, "wrec": 20879, "sonny": 20880, "miz": 20881, "derers": 20882, "sall": 20883, "vens": 20884, "tuscany": 20885, "wid": 20886, "yong": 20887, "kurds": 20888, "waz": 20889, "trolls": 20890, "macro": 20891, "caturday": 20892, "pressing": 20893, "sasha": 20894, "centennial": 20895, "gusts": 20896, "emc": 20897, "before": 20898, "denise": 20899, "cust": 20900, "ðŁĵ¢": 20901, "looo": 20902, "basel": 20903, "england": 20904, "yolo": 20905, "ardu": 20906, "manifesto": 20907, "doha": 20908, "ìľ": 20909, "knives": 20910, "bournemouth": 20911, "bibl": 20912, "barb": 20913, "alicia": 20914, "Ø©": 20915, "comer": 20916, "cyclone": 20917, "git": 20918, "anews": 20919, "characteri": 20920, "ventura": 20921, "intra": 20922, "sfgiants": 20923, "hut": 20924, "bea": 20925, "darwin": 20926, "eller": 20927, "alv": 20928, "reese": 20929, "bly": 20930, "karan": 20931, "conclusion": 20932, "manny": 20933, "flakes": 20934, "uniteblue": 20935, "nadu": 20936, "copp": 20937, "edges": 20938, "lancashire": 20939, "ials": 20940, "otta": 20941, "philippe": 20942, "lent": 20943, "chee": 20944, "mentors": 20945, "festival": 20946, "anism": 20947, "complimentary": 20948, "rj": 20949, "pug": 20950, "dine": 20951, "wei": 20952, "cliffs": 20953, "sarmy": 20954, "tiveness": 20955, "treasury": 20956, "iland": 20957, "aftermath": 20958, "rabbi": 20959, "oun": 20960, "bouquet": 20961, "heritage": 20962, "zion": 20963, "surrender": 20964, "shenan": 20965, "inks": 20966, "karl": 20967, "ghty": 20968, "policing": 20969, "examination": 20970, "cey": 
20971, "persu": 20972, "measurement": 20973, "hydrogen": 20974, "luhan": 20975, "âłĢâłĢâłĢâłĢ": 20976, "wari": 20977, "оÐ": 20978, "jy": 20979, "fowler": 20980, "mish": 20981, "alfre": 20982, "âĺij": 20983, "bbnaija": 20984, "catalogue": 20985, "recognised": 20986, "saver": 20987, "huskies": 20988, "colin": 20989, "mundo": 20990, "siva": 20991, "png": 20992, "discounted": 20993, "manutd": 20994, "fresno": 20995, "devin": 20996, "preliminary": 20997, "trophies": 20998, "plastics": 20999, "dug": 21000, "procu": 21001, "indigo": 21002, "gard": 21003, "dylan": 21004, "pitches": 21005, "groundbreaking": 21006, "inson": 21007, "blac": 21008, "anthology": 21009, "fh": 21010, "explic": 21011, "rard": 21012, "admiral": 21013, "sochi": 21014, "lashes": 21015, "splendid": 21016, "envy": 21017, "adv": 21018, "sexy": 21019, "festivities": 21020, "sticking": 21021, "bib": 21022, "thrill": 21023, "opp": 21024, "ariel": 21025, "botanical": 21026, "endurance": 21027, "females": 21028, "bricks": 21029, "vatican": 21030, "blackpool": 21031, "bermu": 21032, "brough": 21033, "roller": 21034, "bid": 21035, "suede": 21036, "slovenia": 21037, "mming": 21038, "mlb": 21039, "medalist": 21040, "dians": 21041, "rehabilitation": 21042, "neon": 21043, "sgo": 21044, "lithu": 21045, "ramos": 21046, "zed": 21047, "pianist": 21048, "intensive": 21049, "broadband": 21050, "study": 21051, "petersburg": 21052, "luca": 21053, "ahhhh": 21054, "physician": 21055, "dillon": 21056, "telecom": 21057, "grief": 21058, "mun": 21059, "acro": 21060, "sided": 21061, "sly": 21062, "blows": 21063, "classiccars": 21064, "trium": 21065, "argy": 21066, "?:": 21067, "hri": 21068, "marshmal": 21069, "âĢĵ": 21070, "topping": 21071, "warsaw": 21072, "transc": 21073, "preservation": 21074, "bav": 21075, "refriger": 21076, "experiments": 21077, "äº": 21078, "glit": 21079, "sliga": 21080, "gage": 21081, "factor": 21082, "flavours": 21083, "brony": 21084, "spo": 21085, "cookbook": 21086, "carriage": 21087, "away": 21088, 
"nyfw": 21089, "onian": 21090, "wg": 21091, "simpsons": 21092, "rolex": 21093, "ðŁı¿": 21094, "crosby": 21095, "ãħ¤": 21096, "credi": 21097, "syndic": 21098, "pubs": 21099, "alife": 21100, "poorly": 21101, "maced": 21102, "ðŁĺŀ": 21103, "behindthe": 21104, "wenger": 21105, "nats": 21106, "ðŁİŁ": 21107, "rubbish": 21108, "procedures": 21109, "typhoon": 21110, "ophobia": 21111, "erdo": 21112, "fuel": 21113, "viera": 21114, "bumps": 21115, "millennium": 21116, "newzealand": 21117, "lectures": 21118, "iton": 21119, "milky": 21120, "responded": 21121, "ê°": 21122, "landscape": 21123, "..@": 21124, "bother": 21125, "âĸ¶": 21126, "zhang": 21127, "huawei": 21128, "tuition": 21129, "sworn": 21130, "inu": 21131, "yor": 21132, "paolo": 21133, "auditions": 21134, "abil": 21135, "malaysian": 21136, "hops": 21137, "feathers": 21138, "mple": 21139, "auts": 21140, "ão": 21141, "bounty": 21142, "iche": 21143, "ìĺ": 21144, "shq": 21145, "pinot": 21146, "gears": 21147, "disappear": 21148, "videogames": 21149, "tna": 21150, "alzheimer": 21151, "ðŁĮŀ": 21152, "aji": 21153, "underwear": 21154, "switching": 21155, "signage": 21156, "oscar": 21157, "econ": 21158, "drow": 21159, "clint": 21160, "plated": 21161, "gundy": 21162, "emblem": 21163, "hoes": 21164, "icist": 21165, "nelly": 21166, "junior": 21167, "roadshow": 21168, "minerals": 21169, "atle": 21170, "alexandria": 21171, "acclaimed": 21172, "vell": 21173, "shiva": 21174, "adhe": 21175, "enne": 21176, "amnesty": 21177, "hounds": 21178, "councillor": 21179, "ðŁē¦": 21180, "aesthe": 21181, "partnering": 21182, "influenced": 21183, "magno": 21184, "flare": 21185, "extinction": 21186, "civilian": 21187, "majesty": 21188, "vail": 21189, "lawmakers": 21190, "racks": 21191, "mcc": 21192, "orian": 21193, "spices": 21194, "errors": 21195, "mayer": 21196, "coca": 21197, "pai": 21198, "sooooo": 21199, "retiring": 21200, "bathro": 21201, "ðŁĻĮðŁĻĮ": 21202, "âĸª": 21203, "suf": 21204, "endorsement": 21205, "building": 21206, "brooch": 21207, 
"palla": 21208, "arvind": 21209, "agent": 21210, "karate": 21211, "rhi": 21212, "ctv": 21213, "taine": 21214, "umm": 21215, "bax": 21216, "reigns": 21217, "uniof": 21218, "enterprises": 21219, "adele": 21220, "flake": 21221, "attire": 21222, "bruce": 21223, "bahamas": 21224, "gravy": 21225, "sain": 21226, "cheek": 21227, "trivi": 21228, "lov": 21229, "een": 21230, "bblo": 21231, "ladygaga": 21232, "itta": 21233, ".\"-": 21234, "dustin": 21235, "observatory": 21236, "eighth": 21237, "bloomberg": 21238, "khs": 21239, "fcc": 21240, "gist": 21241, "commemorate": 21242, "veer": 21243, "sexuality": 21244, "edc": 21245, "nicole": 21246, "vacancy": 21247, "user": 21248, "sona": 21249, ":'(": 21250, "diploma": 21251, "tend": 21252, "upgrades": 21253, "ÅŁ": 21254, "jurassic": 21255, "cardiac": 21256, "drs": 21257, "widespread": 21258, "ĆƒÅ‚": 21259, "dailies": 21260, "vendor": 21261, "simplicity": 21262, "wider": 21263, "lenses": 21264, "supplements": 21265, "depos": 21266, "observed": 21267, "vines": 21268, "partially": 21269, "renewal": 21270, "collaborate": 21271, "alig": 21272, "finity": 21273, "phu": 21274, "zzy": 21275, "petit": 21276, "ðŁĵħ": 21277, "zin": 21278, "igu": 21279, "smack": 21280, "fallon": 21281, "ðŁĵ£": 21282, "backwards": 21283, "component": 21284, "oso": 21285, "compatible": 21286, "binding": 21287, "zurich": 21288, "thome": 21289, "wounds": 21290, "lyric": 21291, "freshmen": 21292, "sneaky": 21293, "fibro": 21294, "diet": 21295, "employer": 21296, "insect": 21297, "hated": 21298, "scher": 21299, "razor": 21300, "nsw": 21301, "booker": 21302, "californi": 21303, "avfc": 21304, "°": 21305, "pretending": 21306, "pepsi": 21307, "alis": 21308, "untitled": 21309, "kart": 21310, "grandparents": 21311, "ethe": 21312, "ock": 21313, "luxemb": 21314, "visuals": 21315, "smallbusiness": 21316, "abdullah": 21317, "minho": 21318, "subaru": 21319, "hra": 21320, "revealing": 21321, "heartbreaking": 21322, "clarity": 21323, "amg": 21324, "slr": 21325, "****": 21326, 
"âŀĸ": 21327, "record": 21328, "iciary": 21329, "minded": 21330, "yeh": 21331, "excessive": 21332, "knuck": 21333, "icecream": 21334, "truth": 21335, "evic": 21336, "tastic": 21337, "antarc": 21338, "rendering": 21339, ",,": 21340, "mitt": 21341, "lorenzo": 21342, "stpatrick": 21343, "boundary": 21344, "zig": 21345, "vocab": 21346, "osaka": 21347, "furn": 21348, "tun": 21349, "gul": 21350, "sounding": 21351, "blogger": 21352, "utterly": 21353, "gaf": 21354, "advancing": 21355, "lcd": 21356, "margin": 21357, "lifelong": 21358, "solstice": 21359, "shra": 21360, "waits": 21361, "plear": 21362, "breach": 21363, "enligh": 21364, "ader": 21365, "ittle": 21366, "cation": 21367, "hoon": 21368, "studied": 21369, "?????": 21370, "kash": 21371, "evangeli": 21372, "psl": 21373, "weights": 21374, "metals": 21375, "tyres": 21376, "turno": 21377, "wie": 21378, "carb": 21379, "gale": 21380, "seal": 21381, "sunite": 21382, "amic": 21383, "patterson": 21384, "Ôn": 21385, "euph": 21386, "upstairs": 21387, "qualifiers": 21388, "khalifa": 21389, "applemusic": 21390, "ìĨĮëħ": 21391, "vaughan": 21392, "alter": 21393, "cruiser": 21394, "mua": 21395, "tana": 21396, "katrina": 21397, "idols": 21398, "spoiled": 21399, "secretly": 21400, "fibre": 21401, "partnered": 21402, "umes": 21403, "giov": 21404, "comet": 21405, "screenshotsaturday": 21406, "keller": 21407, "filtr": 21408, "fet": 21409, "conway": 21410, "peu": 21411, "badminton": 21412, "gid": 21413, "mound": 21414, "donkey": 21415, "buff": 21416, "leather": 21417, "largely": 21418, "broch": 21419, "intments": 21420, "amuse": 21421, "rk": 21422, "stove": 21423, "impacted": 21424, "cont": 21425, "cracks": 21426, "prisoner": 21427, "bari": 21428, "contractor": 21429, "orioles": 21430, "dominate": 21431, "polar": 21432, "amelia": 21433, "drc": 21434, "ðŁijĮðŁijĮ": 21435, "vist": 21436, "suarez": 21437, "injection": 21438, "blooms": 21439, "ðŁļ¨ðŁļ¨": 21440, "stiff": 21441, "paypal": 21442, "snowing": 21443, "thursdays": 21444, "goose": 
21445, "wedge": 21446, "educated": 21447, "weakness": 21448, "decker": 21449, "abudha": 21450, "breezy": 21451, "ƛĮ": 21452, "hopeful": 21453, "obi": 21454, "raider": 21455, "gham": 21456, "deu": 21457, "seve": 21458, "partly": 21459, "fut": 21460, "infused": 21461, "merri": 21462, "thane": 21463, "sometime": 21464, "hue": 21465, "mein": 21466, "credit": 21467, "sliding": 21468, "rande": 21469, "cherry": 21470, "deadpool": 21471, "shol": 21472, "aram": 21473, "underwood": 21474, "skye": 21475, "disturbing": 21476, "mnt": 21477, "polished": 21478, "guardians": 21479, "hadn": 21480, "picasso": 21481, "arius": 21482, "akshay": 21483, "irri": 21484, "jh": 21485, "happen": 21486, "lakh": 21487, "dalton": 21488, "atthe": 21489, "swell": 21490, "marsha": 21491, "reh": 21492, "cours": 21493, "jkt": 21494, "topus": 21495, "service": 21496, "rink": 21497, "hackers": 21498, "donovan": 21499, "horo": 21500, "tcm": 21501, "mayhem": 21502, "chase": 21503, "devops": 21504, "kensing": 21505, "scup": 21506, "shere": 21507, "qualification": 21508, "clive": 21509, "tong": 21510, "nancy": 21511, "maris": 21512, "derdale": 21513, "berman": 21514, "cinderella": 21515, "jolly": 21516, "cic": 21517, "loot": 21518, "collectibles": 21519, "homicide": 21520, "gge": 21521, "epidemic": 21522, "suites": 21523, "muddy": 21524, "gimme": 21525, "erec": 21526, "-*": 21527, "talla": 21528, "lisle": 21529, "embroide": 21530, "ðŁĩ©ðŁĩª": 21531, "verizon": 21532, "vector": 21533, "beanie": 21534, "artisan": 21535, "gain": 21536, "flores": 21537, "vigil": 21538, "uso": 21539, "ðŁĻıðŁı½": 21540, "grinding": 21541, "gher": 21542, "airports": 21543, "responsive": 21544, "shaft": 21545, "cancel": 21546, "ceremonies": 21547, "eme": 21548, "atari": 21549, "brushes": 21550, "eager": 21551, "bohemi": 21552, "childrens": 21553, "yankee": 21554, "maa": 21555, "suspense": 21556, "moran": 21557, "macar": 21558, "sunflower": 21559, "crew": 21560, "void": 21561, "kear": 21562, "fashioned": 21563, "jennings": 21564, 
"sundayfunday": 21565, "submissions": 21566, "mead": 21567, "herman": 21568, "wai": 21569, "critically": 21570, "leum": 21571, "baekhyun": 21572, "forcing": 21573, "cobra": 21574, "ãģ®": 21575, "acquire": 21576, "alk": 21577, "geology": 21578, "primar": 21579, "importantly": 21580, "irez": 21581, "bundesliga": 21582, "curiosity": 21583, "sena": 21584, "strict": 21585, "consoli": 21586, "winters": 21587, "venom": 21588, "cheltenham": 21589, "ðŁįº": 21590, "cena": 21591, "tat": 21592, "bain": 21593, "glover": 21594, "undercover": 21595, "asses": 21596, "carn": 21597, "memorialday": 21598, "ameli": 21599, "irene": 21600, "chon": 21601, "synthesis": 21602, "speedy": 21603, "mitsubi": 21604, "slayer": 21605, "composite": 21606, "understands": 21607, "pew": 21608, "interrup": 21609, "henri": 21610, "morrow": 21611, "anom": 21612, "thofjuly": 21613, "glee": 21614, "three": 21615, "ðŁĺ®": 21616, "andhi": 21617, "chatt": 21618, "renewables": 21619, "yes": 21620, "transfers": 21621, "!!!!!!!!": 21622, "babu": 21623, "duter": 21624, "loops": 21625, "peers": 21626, "oilers": 21627, "paulo": 21628, "ication": 21629, "hmu": 21630, "wara": 21631, "mercer": 21632, "homeland": 21633, "fuji": 21634, "aley": 21635, "yearbook": 21636, "rem": 21637, "reen": 21638, "absur": 21639, "bois": 21640, "]:": 21641, "caesar": 21642, "shotgun": 21643, "kurdish": 21644, "oren": 21645, "rae": 21646, "ancies": 21647, "typic": 21648, "fh": 21649, "default": 21650, "replic": 21651, "luk": 21652, "transactions": 21653, "rys": 21654, "infantry": 21655, "ðŁį¾": 21656, "chow": 21657, "chickens": 21658, "bagh": 21659, "wyatt": 21660, "aye": 21661, "ggi": 21662, "brews": 21663, "editions": 21664, "mira": 21665, "commencement": 21666, "presu": 21667, "periscope": 21668, "ichi": 21669, "guatemala": 21670, "zambia": 21671, "paints": 21672, "witches": 21673, "wani": 21674, "undere": 21675, "croy": 21676, "vows": 21677, "usmc": 21678, "hearted": 21679, "theatres": 21680, "shuffle": 21681, "level": 21682, 
"multic": 21683, "squeeze": 21684, "fern": 21685, "appet": 21686, "postal": 21687, "malt": 21688, "onboard": 21689, "ldnt": 21690, "coo": 21691, "ssc": 21692, "kac": 21693, "ðŁĺĩ": 21694, "scrap": 21695, "marcos": 21696, "dealers": 21697, "annu": 21698, "miller": 21699, "cove": 21700, "ulary": 21701, "vladimir": 21702, "beef": 21703, "thur": 21704, "pickled": 21705, "sesame": 21706, "bengaluru": 21707, "mott": 21708, "kathleen": 21709, "hist": 21710, "notor": 21711, "drank": 21712, "duchess": 21713, "snowfall": 21714, "eff": 21715, "tiny": 21716, "jn": 21717, "syour": 21718, "specialists": 21719, "scotus": 21720, "baylor": 21721, "everest": 21722, "malibu": 21723, "prem": 21724, "harmful": 21725, "lali": 21726, "bates": 21727, "gye": 21728, "differenti": 21729, "andra": 21730, "geometry": 21731, "elover": 21732, "blackout": 21733, "====": 21734, "kota": 21735, "interact": 21736, "asian": 21737, "layo": 21738, "samurai": 21739, "fidel": 21740, "exhausted": 21741, "gladi": 21742, "pdt": 21743, "spheric": 21744, "antiqu": 21745, "guitar": 21746, "sturi": 21747, "hopper": 21748, "angle": 21749, "fills": 21750, "slap": 21751, "mith": 21752, "rodney": 21753, "ongi": 21754, "insom": 21755, "preventing": 21756, "cassidy": 21757, "apho": 21758, "oregon": 21759, "loin": 21760, "hammond": 21761, "contributing": 21762, "fn": 21763, "garri": 21764, "orion": 21765, "compelling": 21766, "escaping": 21767, "aiming": 21768, "plumb": 21769, "bistro": 21770, "beasts": 21771, "concerning": 21772, "boe": 21773, "dopp": 21774, "shoplocal": 21775, "stumbled": 21776, "âĤ¹": 21777, "nazis": 21778, "âĢįâĻĤï¸ı": 21779, "gesture": 21780, "warts": 21781, "usopen": 21782, "higgins": 21783, "charli": 21784, "hangs": 21785, "bombers": 21786, "°:": 21787, "feeds": 21788, "cch": 21789, "stil": 21790, "nicola": 21791, "ðŁĵº": 21792, "clamation": 21793, "tropic": 21794, "afro": 21795, "ouk": 21796, "expenses": 21797, "derrick": 21798, "aline": 21799, "faw": 21800, "regard": 21801, "imer": 21802, 
"satin": 21803, "thium": 21804, "ryder": 21805, "pearl": 21806, "tess": 21807, "mmmmm": 21808, "senses": 21809, "ðŁĩ¹": 21810, "positive": 21811, "exhaust": 21812, "occur": 21813, "norris": 21814, "lilly": 21815, "isles": 21816, "directing": 21817, "yofficial": 21818, "countless": 21819, "samar": 21820, "onstage": 21821, "flock": 21822, "mirrors": 21823, "archer": 21824, "moi": 21825, "kd": 21826, "viv": 21827, "inos": 21828, "sikh": 21829, "lei": 21830, "sensory": 21831, "brits": 21832, "knox": 21833, "chestnut": 21834, "opy": 21835, "coliseum": 21836, "zaf": 21837, "divin": 21838, "adapter": 21839, ":)))": 21840, "temple": 21841, "kun": 21842, "helmets": 21843, "tdf": 21844, "guide": 21845, "mold": 21846, "oids": 21847, "luther": 21848, "heis": 21849, "monastery": 21850, "spree": 21851, "klu": 21852, "britney": 21853, "jaguars": 21854, "greats": 21855, "ccc": 21856, "kyrie": 21857, "machinery": 21858, "cricket": 21859, "rero": 21860, "abo": 21861, "aspiring": 21862, "semifinals": 21863, "aless": 21864, "signatures": 21865, "vard": 21866, "meth": 21867, "herbal": 21868, "holden": 21869, "kingdom": 21870, "apor": 21871, "reggie": 21872, "oreo": 21873, "palestinians": 21874, "emmys": 21875, "sectional": 21876, "roi": 21877, "neymar": 21878, "quel": 21879, "cull": 21880, "lka": 21881, "hazel": 21882, "estimate": 21883, "ulties": 21884, "gow": 21885, "bea": 21886, "purchases": 21887, "belts": 21888, "protects": 21889, "mé": 21890, "guessing": 21891, "bbo": 21892, "claudia": 21893, "fracking": 21894, "jonny": 21895, "elk": 21896, "celtic": 21897, "almighty": 21898, "raje": 21899, "courtyard": 21900, "igi": 21901, "canes": 21902, "ðŁēªðŁı»": 21903, "bankrup": 21904, "lethal": 21905, "âľĮï¸ı": 21906, "graphicdesign": 21907, "vader": 21908, "pencils": 21909, "roughly": 21910, "dante": 21911, "mfg": 21912, "constell": 21913, "camel": 21914, "jb": 21915, "blossoms": 21916, "ento": 21917, "balochistan": 21918, "cinemato": 21919, "illard": 21920, "jersey": 21921, "consent": 
21922, "dented": 21923, "contempl": 21924, "scher": 21925, "holi": 21926, "lough": 21927, "stour": 21928, "ayo": 21929, "beginners": 21930, "curb": 21931, "vhs": 21932, "ajax": 21933, "duff": 21934, "aveng": 21935, "domest": 21936, "committing": 21937, "aired": 21938, "chap": 21939, "hedgehog": 21940, "disappointing": 21941, "freelance": 21942, "inland": 21943, "charms": 21944, "ðŁĺįâĿ¤ï¸ı": 21945, "aish": 21946, "mx": 21947, "buckle": 21948, "tidal": 21949, "permit": 21950, "boating": 21951, "racha": 21952, "kendrick": 21953, "bello": 21954, "bhi": 21955, "plea": 21956, "estimates": 21957, "lb": 21958, "apologies": 21959, "jaya": 21960, "bbl": 21961, "astoni": 21962, "interstate": 21963, "maintaining": 21964, "elbow": 21965, "mup": 21966, "epit": 21967, "ðŁĺ”": 21968, "violations": 21969, "defend": 21970, "beh": 21971, "slc": 21972, "amir": 21973, "puri": 21974, "tium": 21975, "fifa": 21976, "blurry": 21977, "scrim": 21978, "ðŁĻıðŁı¾": 21979, "maple": 21980, "relatives": 21981, "âĺĿ": 21982, "choc": 21983, "connor": 21984, "⾨⾨": 21985, "whisp": 21986, "listings": 21987, "maze": 21988, "thanking": 21989, "ridd": 21990, "grassroots": 21991, "shifting": 21992, "desperately": 21993, "gorilla": 21994, "deni": 21995, "jules": 21996, "strath": 21997, "gley": 21998, "jain": 21999, "buick": 22000, "tanner": 22001, "ðŁēĿ": 22002, "gae": 22003, "prim": 22004, "itors": 22005, "nano": 22006, "separation": 22007, "armenia": 22008, "bordeaux": 22009, "ðŁħ": 22010, "pjnet": 22011, "burial": 22012, "ebon": 22013, "gloss": 22014, "renew": 22015, "grier": 22016, "speeds": 22017, "comicbooks": 22018, "symboli": 22019, "purposes": 22020, "ãħłãħł": 22021, "spatial": 22022, "notable": 22023, "cion": 22024, "nps": 22025, "hoffman": 22026, "norman": 22027, "rtg": 22028, "dusty": 22029, "situated": 22030, "tran": 22031, "kfc": 22032, "emen": 22033, "nickel": 22034, "hastings": 22035, "settling": 22036, "grit": 22037, "lena": 22038, "waw": 22039, "arts": 22040, "gum": 22041, "caregi": 
22042, "lewis": 22043, "sapphire": 22044, "remember": 22045, "embedded": 22046, "tlc": 22047, "blat": 22048, "sergeant": 22049, "elsa": 22050, "bootcamp": 22051, "bowman": 22052, "photographic": 22053, "pillars": 22054, "directioners": 22055, "classified": 22056, "nois": 22057, "veer": 22058, "barrels": 22059, "whoop": 22060, "ðŁĺ±ðŁĺ±": 22061, "female": 22062, "petroleum": 22063, "media": 22064, "efc": 22065, "pokémon": 22066, "à¤ķ": 22067, "enthusiastic": 22068, "varun": 22069, "profiles": 22070, "pediatric": 22071, "accidents": 22072, "conrad": 22073, "jang": 22074, "jojo": 22075, "acor": 22076, "observer": 22077, "lf": 22078, "livestock": 22079, "forgi": 22080, "fos": 22081, "elm": 22082, "anand": 22083, "goe": 22084, "cere": 22085, "avoiding": 22086, "grit": 22087, "oman": 22088, "thankfully": 22089, "scattered": 22090, "nicky": 22091, "cylinder": 22092, "cheesy": 22093, "diver": 22094, "mahesh": 22095, "caves": 22096, "earliest": 22097, "quinte": 22098, "subjects": 22099, "bend": 22100, "gulf": 22101, "vocalist": 22102, "glue": 22103, "patches": 22104, "unstopp": 22105, "snyder": 22106, "demonstrating": 22107, "pio": 22108, "horns": 22109, "wickets": 22110, "andthe": 22111, "rama": 22112, "yoon": 22113, "straight": 22114, "bedtime": 22115, "orang": 22116, "bullets": 22117, "saurus": 22118, "miners": 22119, "incidents": 22120, "!...": 22121, "ðŁİ¸": 22122, "agers": 22123, "handles": 22124, "states": 22125, "inity": 22126, "dons": 22127, "incredible": 22128, "eminem": 22129, "aviv": 22130, "rudy": 22131, "mozart": 22132, "folklore": 22133, "appliances": 22134, "mtl": 22135, "frey": 22136, "dias": 22137, "hua": 22138, "pageant": 22139, "strive": 22140, "imprison": 22141, "bullish": 22142, "rana": 22143, "alerts": 22144, "bbmas": 22145, "hyper": 22146, "derbyshire": 22147, "recre": 22148, "redd": 22149, "deborah": 22150, "cosmos": 22151, "lawson": 22152, "melanie": 22153, "psycho": 22154, "hoor": 22155, "doodles": 22156, "sniper": 22157, "shady": 22158, "mantle": 
22159, "canadian": 22160, "newyear": 22161, "interactions": 22162, "separated": 22163, "cords": 22164, "spirituality": 22165, "apu": 22166, "ito": 22167, "pct": 22168, "pelosi": 22169, "rebellion": 22170, "seiz": 22171, "worcester": 22172, "sectors": 22173, "uli": 22174, "santa": 22175, "е": 22176, "ðŁĩªðŁĩ¸": 22177, "biased": 22178, "classical": 22179, "gamma": 22180, "deeplear": 22181, "emerge": 22182, "backer": 22183, "surance": 22184, "handcrafted": 22185, "ðŁİ„": 22186, "francis": 22187, "millan": 22188, "ici": 22189, "crown": 22190, "wow": 22191, "striped": 22192, "unfair": 22193, "relaxation": 22194, "³ï¸ı": 22195, "embracing": 22196, "shealth": 22197, "paleo": 22198, "martini": 22199, "distillery": 22200, "wrink": 22201, "ork": 22202, "nath": 22203, "hayley": 22204, "courthouse": 22205, "siber": 22206, "sadi": 22207, "quietly": 22208, "melt": 22209, "msm": 22210, "meh": 22211, "smartphones": 22212, "relent": 22213, "pping": 22214, "warwick": 22215, "cologne": 22216, "glia": 22217, "cotton": 22218, "prog": 22219, "lone": 22220, "ipsw": 22221, "starters": 22222, "expands": 22223, "ump": 22224, "sued": 22225, "skipper": 22226, "infections": 22227, "ingle": 22228, "Ô": 22229, "clerk": 22230, "demonstrate": 22231, "acar": 22232, "ðŁĺĤðŁĺĤðŁĺĤ": 22233, "tibet": 22234, "buns": 22235, "alom": 22236, "demolition": 22237, "ssia": 22238, "gst": 22239, "[]": 22240, "soar": 22241, "âĺĢ": 22242, "ðŁĺª": 22243, "ðŁĵĬ": 22244, "deepest": 22245, "beyond": 22246, "aret": 22247, "attends": 22248, "activated": 22249, "dimit": 22250, "âļªï¸ı": 22251, "highlighted": 22252, "magazines": 22253, "rumor": 22254, "azza": 22255, "stephens": 22256, "dolph": 22257, "shockey": 22258, "mats": 22259, "weav": 22260, "melan": 22261, "servers": 22262, "traum": 22263, "kush": 22264, "æĹ": 22265, "babys": 22266, "paz": 22267, "aal": 22268, "lause": 22269, "breakers": 22270, "canterbury": 22271, "ulture": 22272, "miri": 22273, "euros": 22274, "taneous": 22275, "impressions": 22276, "dutch": 
22277, "ild": 22278, "ghi": 22279, "purdue": 22280, "adequate": 22281, "lp": 22282, "syner": 22283, "angler": 22284, "durable": 22285, "galore": 22286, "rown": 22287, "mgmt": 22288, "ðŁĵĮ": 22289, "lucia": 22290, "âĺijï¸ı": 22291, "zayn": 22292, "borrow": 22293, ".(": 22294, "northumber": 22295, "crush": 22296, "enga": 22297, "sush": 22298, "extravag": 22299, "tout": 22300, "mahal": 22301, "alistic": 22302, "thermo": 22303, "galleries": 22304, "esse": 22305, "chibi": 22306, "attractions": 22307, "lexington": 22308, "legislature": 22309, "documented": 22310, "residen": 22311, "brownies": 22312, "wf": 22313, "stool": 22314, "planets": 22315, "shoppers": 22316, "conductor": 22317, "msp": 22318, "tricky": 22319, "fruity": 22320, "endra": 22321, "feelthe": 22322, "whipped": 22323, "hairstyle": 22324, "refer": 22325, "ook": 22326, "octopus": 22327, "audiences": 22328, "kumar": 22329, "afterno": 22330, "optim": 22331, "cfl": 22332, "nip": 22333, "geni": 22334, "alphabet": 22335, "annab": 22336, "lamin": 22337, "accepts": 22338, "lng": 22339, "ðŁĺ«": 22340, "tine": 22341, "acom": 22342, "cheerleaders": 22343, "tk": 22344, "gron": 22345, "vg": 22346, "kung": 22347, "jax": 22348, "dhabi": 22349, "rss": 22350, "mackenzie": 22351, "beirut": 22352, "cleanup": 22353, "gypsy": 22354, "stell": 22355, "burger": 22356, "hurricanes": 22357, "education": 22358, "stina": 22359, "âĻ”âĻ”": 22360, "unfortunate": 22361, "jeremi": 22362, "badger": 22363, "aters": 22364, ":â̦": 22365, "terra": 22366, "sublime": 22367, "stud": 22368, "ymca": 22369, "mru": 22370, "duterte": 22371, "brennan": 22372, "bulb": 22373, "melo": 22374, "ylon": 22375, "hacker": 22376, "cred": 22377, "gud": 22378, "asan": 22379, "padilla": 22380, "embroidered": 22381, "vietnamese": 22382, "pioneers": 22383, "projection": 22384, "reboot": 22385, "idc": 22386, "aney": 22387, "primer": 22388, "suffers": 22389, "winding": 22390, "pon": 22391, "stoday": 22392, "morn": 22393, "uch": 22394, "allin": 22395, "adidas": 22396, 
"elizabeth": 22397, "tuck": 22398, "ography": 22399, "ðŁļĢ": 22400, "beg": 22401, "osborne": 22402, "ghetto": 22403, "rh": 22404, "cnn": 22405, "irma": 22406, "makin": 22407, "cables": 22408, "murders": 22409, "ocks": 22410, "insta": 22411, "alas": 22412, "sik": 22413, "cuff": 22414, "lare": 22415, "foodies": 22416, "ovic": 22417, "atom": 22418, "geometric": 22419, "empathy": 22420, "ี": 22421, "centenary": 22422, "newspapers": 22423, "administrative": 22424, "ðŁİĬ": 22425, "stive": 22426, "contractors": 22427, "lett": 22428, "tasmania": 22429, "awesomeness": 22430, "density": 22431, "veen": 22432, "princeton": 22433, "frequently": 22434, "reject": 22435, "ghi": 22436, "modular": 22437, "ceramics": 22438, "shag": 22439, "kiwi": 22440, "canvas": 22441, "sweatshirt": 22442, "anj": 22443, "timm": 22444, "napoli": 22445, "iler": 22446, "appeals": 22447, "hamilton": 22448, "mayo": 22449, "weave": 22450, "arranged": 22451, "wharf": 22452, "occupy": 22453, "bvb": 22454, "asaki": 22455, "otter": 22456, "norm": 22457, "vies": 22458, "detox": 22459, "tional": 22460, "derek": 22461, "idad": 22462, "admissions": 22463, "constituency": 22464, "upper": 22465, "woot": 22466, "alloy": 22467, "seve": 22468, "lub": 22469, "uncomfortable": 22470, "edwin": 22471, "abre": 22472, "dwight": 22473, "arche": 22474, "virtually": 22475, "spol": 22476, "prie": 22477, "aii": 22478, "err": 22479, "switch": 22480, "barack": 22481, "seok": 22482, "coul": 22483, "wnt": 22484, "poul": 22485, "olive": 22486, "caffeine": 22487, "cardiff": 22488, "notorious": 22489, "demp": 22490, "excess": 22491, "barr": 22492, "tford": 22493, "ajay": 22494, "bumped": 22495, "mythology": 22496, "shelley": 22497, "falcon": 22498, "shakespeare": 22499, "mustangs": 22500, "noted": 22501, "bone": 22502, "civilization": 22503, "syd": 22504, "parsons": 22505, "unofficial": 22506, "hyped": 22507, "spends": 22508, "opposed": 22509, "vings": 22510, "spacex": 22511, "notification": 22512, "deciding": 22513, "biotech": 22514, 
"outsi": 22515, "salah": 22516, "!.": 22517, "fed": 22518, "ssy": 22519, "cms": 22520, "badgers": 22521, "cro": 22522, "elaine": 22523, "nba": 22524, "dyour": 22525, "nant": 22526, "honeymoon": 22527, "climbed": 22528, "conomy": 22529, "atha": 22530, "mell": 22531, "nebula": 22532, "naturephotography": 22533, "julie": 22534, "bmx": 22535, "invested": 22536, "mono": 22537, "lieutenant": 22538, "watkins": 22539, "technician": 22540, "ose": 22541, "kae": 22542, "ìĽ": 22543, "mcqueen": 22544, "preach": 22545, "traveller": 22546, "flexibility": 22547, "zebra": 22548, "retailer": 22549, "pant": 22550, "bender": 22551, "brandt": 22552, "squid": 22553, "warrant": 22554, "verified": 22555, "cass": 22556, "piercing": 22557, "honours": 22558, "tying": 22559, "morris": 22560, "kissed": 22561, "oprah": 22562, "panoramic": 22563, "mei": 22564, "splatoon": 22565, "wichita": 22566, "arias": 22567, "galli": 22568, "indyref": 22569, "goodtimes": 22570, "atheist": 22571, "confession": 22572, "owski": 22573, "repping": 22574, "additions": 22575, "mechanism": 22576, "zim": 22577, "jans": 22578, "suf": 22579, "chopped": 22580, "beginnings": 22581, "vitamins": 22582, "ãħ¤ãħ¤": 22583, "orth": 22584, "poles": 22585, "rub": 22586, "antarctica": 22587, "indiefilm": 22588, "webcam": 22589, "ketch": 22590, "brett": 22591, "clement": 22592, "heron": 22593, "defeating": 22594, "hydro": 22595, "bucket": 22596, "wandering": 22597, "sidney": 22598, "futureof": 22599, "binge": 22600, "onies": 22601, "knockout": 22602, "administrator": 22603, "synthe": 22604, "lent": 22605, "jani": 22606, "barley": 22607, "premierleague": 22608, "nerds": 22609, "crm": 22610, "bras": 22611, "botany": 22612, "evolved": 22613, "rotter": 22614, "rowed": 22615, "tumor": 22616, "wealthy": 22617, "Ć‚Åƒ": 22618, "monarch": 22619, "lished": 22620, "dahl": 22621, "ðŁİĄ": 22622, "buch": 22623, "kenyan": 22624, "ا": 22625, "redness": 22626, "assembled": 22627, "semit": 22628, "hudder": 22629, "shrop": 22630, "rani": 22631, 
"learning": 22632, "mory": 22633, "itia": 22634, "geographic": 22635, "worldof": 22636, "fb": 22637, "phosp": 22638, "boogie": 22639, "amped": 22640, "?...": 22641, "chew": 22642, "dwarf": 22643, "arus": 22644, "ssen": 22645, "rusty": 22646, "recruits": 22647, "hk": 22648, "garde": 22649, "applause": 22650, "volumes": 22651, "involves": 22652, "tac": 22653, "handbag": 22654, "translate": 22655, "ffel": 22656, "seym": 22657, "aquatic": 22658, "transfer": 22659, "zodi": 22660, "andr": 22661, "academia": 22662, "crater": 22663, "tez": 22664, "arse": 22665, "adapt": 22666, "coloni": 22667, "snowman": 22668, "mali": 22669, "hangin": 22670, "dischar": 22671, "oysters": 22672, "phoe": 22673, "colonel": 22674, "wba": 22675, "hispanic": 22676, "thriving": 22677, "shy": 22678, "agles": 22679, "salesforce": 22680, "creme": 22681, "soles": 22682, "lafayette": 22683, "âī": 22684, "teria": 22685, "acha": 22686, "sperson": 22687, "gogo": 22688, "carly": 22689, "theore": 22690, "amore": 22691, "vox": 22692, "aft": 22693, "ãĤ¹": 22694, "staple": 22695, "muffin": 22696, "diagram": 22697, "inox": 22698, "sustained": 22699, "avent": 22700, "meta": 22701, "arbitr": 22702, "decay": 22703, "adole": 22704, "н": 22705, "ecol": 22706, "pho": 22707, "nk": 22708, "ocu": 22709, "granny": 22710, "ça": 22711, "luxembour": 22712, "stadt": 22713, "alberto": 22714, "levit": 22715, "amas": 22716, "dx": 22717, "orphan": 22718, "cobb": 22719, "asc": 22720, "logy": 22721, "immense": 22722, "chants": 22723, "offline": 22724, "pent": 22725, "brex": 22726, "winger": 22727, "plane": 22728, "iel": 22729, "nichols": 22730, "cathy": 22731, "naruto": 22732, "lowed": 22733, "///": 22734, "ignorance": 22735, "catastro": 22736, "youts": 22737, "schen": 22738, "build": 22739, "hazi": 22740, "sine": 22741, "criticalrole": 22742, "dug": 22743, "detect": 22744, "logs": 22745, "enamel": 22746, "stpatricksday": 22747, "eddie": 22748, "copa": 22749, "cigarettes": 22750, "hoff": 22751, "kaya": 22752, "lagoon": 22753, 
"rapha": 22754, "airborne": 22755, "choose": 22756, "puertor": 22757, "kev": 22758, "guiding": 22759, "frosty": 22760, "borough": 22761, "mira": 22762, "ðŁİĬ": 22763, "cadet": 22764, "anush": 22765, "yogi": 22766, "eger": 22767, "fling": 22768, "slope": 22769, "ninth": 22770, "weston": 22771, "footwear": 22772, "fn": 22773, "mayweather": 22774, "aam": 22775, "plain": 22776, "staircase": 22777, "witnesses": 22778, "workouts": 22779, "robust": 22780, "dexter": 22781, "cohort": 22782, "ðŁļĹ": 22783, "spell": 22784, "haze": 22785, "oom": 22786, "organising": 22787, "wildfire": 22788, "contacts": 22789, "avon": 22790, "mino": 22791, "updating": 22792, "ðŁį»": 22793, "lithium": 22794, "ingual": 22795, "kis": 22796, "auga": 22797, "locom": 22798, "deduc": 22799, "uda": 22800, "thak": 22801, "boyle": 22802, "mper": 22803, "hottie": 22804, "erik": 22805, "revised": 22806, "isla": 22807, "travelphotography": 22808, "ooza": 22809, "enqui": 22810, "conferences": 22811, "clover": 22812, "groom": 22813, "curves": 22814, "liveon": 22815, "perf": 22816, "displaced": 22817, "bolog": 22818, "xxxx": 22819, "ðŁĺ©ðŁĺ©": 22820, "teal": 22821, "vessels": 22822, "rainforest": 22823, "calci": 22824, "panther": 22825, "giraffe": 22826, "tasted": 22827, "imagery": 22828, "padres": 22829, "daytime": 22830, "bass": 22831, "ripe": 22832, "opioid": 22833, "nue": 22834, "vinyl": 22835, "inventor": 22836, "sens": 22837, "processor": 22838, "mut": 22839, "gadgets": 22840, "biblical": 22841, "shannon": 22842, "jacqueline": 22843, "cary": 22844, "theresistance": 22845, "alien": 22846, "nvi": 22847, "cosy": 22848, "bihar": 22849, "foley": 22850, "rend": 22851, "mugs": 22852, "faken": 22853, "clone": 22854, "niallo": 22855, "grabbed": 22856, "chihu": 22857, "powerhouse": 22858, "ntt": 22859, "cherokee": 22860, "sponge": 22861, "implementing": 22862, "rhine": 22863, "leone": 22864, "ðŁįĢ": 22865, "prettiest": 22866, "infrared": 22867, "improv": 22868, "switched": 22869, "tubes": 22870, "contr": 22871, 
"blk": 22872, "projected": 22873, "beaver": 22874, "yot": 22875, "bbcradio": 22876, "thigh": 22877, "persecu": 22878, "apologize": 22879, "wack": 22880, "poster": 22881, "oliver": 22882, "aza": 22883, "loud": 22884, "(?)": 22885, "fthe": 22886, "womenshi": 22887, "sparrow": 22888, "blush": 22889, "usable": 22890, "scales": 22891, "itative": 22892, "peuge": 22893, "needing": 22894, "leggings": 22895, "glamorous": 22896, "matur": 22897, "cz": 22898, "watt": 22899, "dab": 22900, "tamar": 22901, "etsym": 22902, "bauer": 22903, "heartfelt": 22904, "hn": 22905, "elsewhere": 22906, "birch": 22907, "alumini": 22908, "huck": 22909, "eme": 22910, "jl": 22911, "trafford": 22912, "dz": 22913, "portions": 22914, "anasta": 22915, "arthritis": 22916, "espn": 22917, "bergen": 22918, "violation": 22919, "yoshi": 22920, "cz": 22921, "northumberland": 22922, "closures": 22923, "ðŁĩ¯ðŁĩ": 22924, "smiley": 22925, "rw": 22926, "telugu": 22927, "intensi": 22928, "gregg": 22929, "vega": 22930, "dungeon": 22931, "southbound": 22932, "bail": 22933, "dominican": 22934, "semifinal": 22935, "chapters": 22936, "hitch": 22937, "vanity": 22938, "transiti": 22939, "recommends": 22940, "satisf": 22941, "barca": 22942, "queens": 22943, "((": 22944, "destruc": 22945, "strait": 22946, "ravi": 22947, "desserts": 22948, "intru": 22949, "haram": 22950, "kos": 22951, "foe": 22952, "fatty": 22953, "paisley": 22954, "magnitude": 22955, "dridge": 22956, "comey": 22957, "schemes": 22958, "visionary": 22959, "ourt": 22960, "downloaded": 22961, "ðŁĻĮðŁı½": 22962, "gdpr": 22963, "lani": 22964, "pwc": 22965, "guad": 22966, "nicest": 22967, "stakeholders": 22968, "referred": 22969, "georgetown": 22970, "arvindkejriwal": 22971, "schneider": 22972, "indoors": 22973, "allstar": 22974, "stranded": 22975, "gender": 22976, "zepp": 22977, "masses": 22978, "ðŁIJ±": 22979, "patiently": 22980, "bldg": 22981, "zab": 22982, "wearab": 22983, "vivid": 22984, "heck": 22985, "della": 22986, "symb": 22987, "jeopar": 22988, 
"lager": 22989, "Ć ĀŖ": 22990, "combines": 22991, "nec": 22992, "bray": 22993, "flop": 22994, "txwx": 22995, "joys": 22996, "pont": 22997, "profound": 22998, "surround": 22999, "madhu": 23000, "mable": 23001, "ayr": 23002, "teas": 23003, "nsa": 23004, "openly": 23005, "ernest": 23006, "ãĄ©": 23007, "topo": 23008, "gna": 23009, "antioxid": 23010, "tian": 23011, "etr": 23012, "cello": 23013, "mathi": 23014, "generosity": 23015, "biting": 23016, "manic": 23017, "kelsey": 23018, "cheeks": 23019, "tender": 23020, "wth": 23021, "pronoun": 23022, "ultimately": 23023, "gusta": 23024, "arianag": 23025, "gerry": 23026, "bleed": 23027, "reddy": 23028, "mich": 23029, "mitsubishi": 23030, "operated": 23031, "sexually": 23032, "mau": 23033, "cllr": 23034, "vids": 23035, "coc": 23036, "melted": 23037, "ðŁĮĪ": 23038, "qld": 23039, "itech": 23040, "instrumental": 23041, "endgame": 23042, "ðŁĵĸ": 23043, "energi": 23044, "brownie": 23045, "tamil": 23046, "atin": 23047, "dominated": 23048, "praises": 23049, "fireplace": 23050, "sensational": 23051, "mena": 23052, "karti": 23053, "unprece": 23054, "rupt": 23055, "oriental": 23056, "mccor": 23057, "tournaments": 23058, "scenter": 23059, "reeves": 23060, "prescription": 23061, "same": 23062, "frau": 23063, "truffle": 23064, "embo": 23065, "romans": 23066, "blasts": 23067, "technological": 23068, "prat": 23069, "bsb": 23070, "yar": 23071, "trendy": 23072, "acl": 23073, "alad": 23074, "ðŁįģ": 23075, "ohh": 23076, "bankrupt": 23077, "thoven": 23078, "regards": 23079, "iser": 23080, "warwick": 23081, "vineyards": 23082, "realm": 23083, "niallofficial": 23084, "dota": 23085, "gemini": 23086, "todo": 23087, "vable": 23088, "¨¨": 23089, "lau": 23090, "wreath": 23091, "juve": 23092, "natasha": 23093, "lever": 23094, "lori": 23095, "horser": 23096, "cctv": 23097, "airbnb": 23098, "esanders": 23099, "sinclair": 23100, "emabiggest": 23101, "highschool": 23102, "contest": 23103, "optimistic": 23104, "tte": 23105, "ðŁēķðŁēķ": 23106, "ssd": 23107, 
"yee": 23108, "helena": 23109, "consen": 23110, "ricks": 23111, "jesse": 23112, "anic": 23113, "ðŁİ¯": 23114, "reacts": 23115, "robe": 23116, "independence": 23117, "voltage": 23118, "mington": 23119, "sant": 23120, "à¸Ļà¸": 23121, "----------------": 23122, "sentinel": 23123, "kett": 23124, "rehearsing": 23125, "aaaaaaaa": 23126, "softhe": 23127, "stirling": 23128, "search": 23129, "wigan": 23130, "standout": 23131, "snail": 23132, "pentagon": 23133, "Äģ": 23134, "chlor": 23135, "crust": 23136, "netany": 23137, "chemist": 23138, "disappeared": 23139, "ricardo": 23140, "spiders": 23141, "bose": 23142, "warren": 23143, "messing": 23144, "banners": 23145, "guel": 23146, "parach": 23147, "maid": 23148, "counted": 23149, "epile": 23150, "bonfire": 23151, "speechless": 23152, "setter": 23153, "measured": 23154, "rejects": 23155, "nikki": 23156, "lester": 23157, "forensic": 23158, "fabrics": 23159, "aloha": 23160, "preserved": 23161, "watford": 23162, "detailing": 23163, "darth": 23164, "bou": 23165, "carly": 23166, "...'": 23167, "tailgate": 23168, "notifications": 23169, "Ĥ": 23170, "passive": 23171, "trousers": 23172, "baloch": 23173, "rother": 23174, "typically": 23175, "Ä": 23176, "spit": 23177, "wiz": 23178, "sicily": 23179, "technically": 23180, "expose": 23181, "stage": 23182, "hubb": 23183, "cream": 23184, "caps": 23185, "poke": 23186, "sleek": 23187, "june": 23188, "temporarily": 23189, "dez": 23190, "awakens": 23191, "lame": 23192, "_-": 23193, "jiha": 23194, "tuesdays": 23195, "advised": 23196, "advisors": 23197, "existed": 23198, "disagree": 23199, "newsroom": 23200, "losers": 23201, "worldtour": 23202, "drying": 23203, "aldi": 23204, "harness": 23205, "footprint": 23206, "hobbit": 23207, "pmln": 23208, "iro": 23209, "quered": 23210, "assess": 23211, "gaze": 23212, "sab": 23213, "thian": 23214, "ƭĬ": 23215, "tif": 23216, "observe": 23217, "evil": 23218, "drawer": 23219, "sweep": 23220, "cory": 23221, "cody": 23222, "kyoto": 23223, "callum": 23224, "ninj": 
23225, "laurent": 23226, "bei": 23227, "sketching": 23228, "customized": 23229, "dur": 23230, "regrets": 23231, "knoxville": 23232, "ƬķĦ": 23233, "messaging": 23234, "gracie": 23235, "abundance": 23236, "bidding": 23237, "brewed": 23238, "flouri": 23239, "therapeutic": 23240, "altitude": 23241, "hogs": 23242, "burner": 23243, "electro": 23244, "wonderfully": 23245, "heater": 23246, "postpon": 23247, "livery": 23248, "rall": 23249, "adas": 23250, "aac": 23251, "saul": 23252, "brooklyn": 23253, "playhouse": 23254, "âĻ„âĻ„âĻ„": 23255, "charitable": 23256, "iny": 23257, "zah": 23258, "competitions": 23259, "beav": 23260, "plugged": 23261, "ois": 23262, "doom": 23263, "astronom": 23264, "specialized": 23265, "maxi": 23266, "taps": 23267, "cellular": 23268, "depressed": 23269, "folklorethursday": 23270, "crib": 23271, "emul": 23272, "ë°©": 23273, "figh": 23274, "ruz": 23275, "carlisle": 23276, "spear": 23277, "sidewalk": 23278, "dei": 23279, "dependent": 23280, "laces": 23281, "nhs": 23282, "ðŁĮĻ": 23283, "realizing": 23284, "network": 23285, "riche": 23286, "regin": 23287, "refresh": 23288, "stral": 23289, "pathology": 23290, "plaid": 23291, "psychedelic": 23292, "hind": 23293, "uka": 23294, "algorithm": 23295, "linking": 23296, "progressi": 23297, "fey": 23298, "dade": 23299, "hydrated": 23300, "bant": 23301, "famed": 23302, "cotsw": 23303, "boise": 23304, "asc": 23305, "racing": 23306, "javier": 23307, "wwen": 23308, "marlins": 23309, "poop": 23310, "swept": 23311, "tonights": 23312, "wef": 23313, "anime": 23314, "slovak": 23315, "âŀĸâŀĸ": 23316, "claus": 23317, "lemme": 23318, "clippers": 23319, "rels": 23320, "arianagrande": 23321, "rte": 23322, "kot": 23323, "thalapathy": 23324, "hungarian": 23325, "zuma": 23326, "yvon": 23327, "isu": 23328, "journeys": 23329, "clinics": 23330, "bebe": 23331, "wwf": 23332, "nws": 23333, "superheroes": 23334, "erit": 23335, "sleague": 23336, "identification": 23337, "motto": 23338, "bai": 23339, "sourced": 23340, "iller": 23341, 
"api": 23342, "prise": 23343, "unprecedented": 23344, "damas": 23345, "tunisia": 23346, "drain": 23347, "underestim": 23348, "ether": 23349, "quarterly": 23350, "rewarding": 23351, "alham": 23352, "wolverine": 23353, "cabine": 23354, "hypno": 23355, "nadine": 23356, "havana": 23357, "dae": 23358, "ðŁĵĪ": 23359, "dron": 23360, "readings": 23361, "bati": 23362, "pico": 23363, "merci": 23364, "itian": 23365, "walkers": 23366, "elope": 23367, "mikey": 23368, "godzilla": 23369, "burlington": 23370, "abuja": 23371, "socialism": 23372, "atility": 23373, "shell": 23374, "harrypotter": 23375, "gno": 23376, "abur": 23377, "releg": 23378, "felici": 23379, "rogen": 23380, "neuroscience": 23381, "instin": 23382, "atham": 23383, "vouchers": 23384, "jarre": 23385, "fuse": 23386, "defici": 23387, "monterey": 23388, "deport": 23389, "midday": 23390, "ppard": 23391, "freed": 23392, "ameter": 23393, "wilt": 23394, "ningham": 23395, "pratt": 23396, "liberty": 23397, "slogan": 23398, "oto": 23399, "pri": 23400, "coated": 23401, "cpd": 23402, "nett": 23403, "illas": 23404, "malawi": 23405, "evolve": 23406, "accessibility": 23407, "ðŁĶ„ðŁĶ„ðŁĶ„ðŁĶ„": 23408, "ornament": 23409, "bp": 23410, "elis": 23411, "sonline": 23412, "chiro": 23413, "flick": 23414, "ibm": 23415, "arak": 23416, "enables": 23417, "garland": 23418, "sane": 23419, "cuties": 23420, "trip": 23421, "rotterdam": 23422, "nys": 23423, "lamps": 23424, "lucas": 23425, "bog": 23426, "rails": 23427, "travelled": 23428, "hicks": 23429, "enu": 23430, "sabha": 23431, "scrub": 23432, "hier": 23433, "hartford": 23434, "foo": 23435, "fernandez": 23436, "trevor": 23437, "mattress": 23438, "appointments": 23439, "alej": 23440, "fei": 23441, "ologist": 23442, "safar": 23443, "octa": 23444, "src": 23445, "shaun": 23446, "ambient": 23447, "dric": 23448, "biker": 23449, "shee": 23450, "mustache": 23451, "hta": 23452, "boone": 23453, "herty": 23454, "cardio": 23455, "brakes": 23456, "recital": 23457, "consists": 23458, "overwhelmed": 23459, 
"caul": 23460, "robbins": 23461, "imit": 23462, "alth": 23463, "url": 23464, "bibli": 23465, "onne": 23466, "blacklivesmatter": 23467, "difficulties": 23468, "telang": 23469, "taller": 23470, "ðŁĵĨ": 23471, "debating": 23472, "burrito": 23473, "movember": 23474, "strengthening": 23475, "boe": 23476, "testam": 23477, "miracles": 23478, "baseball": 23479, "renee": 23480, "ðŁijīðŁı»": 23481, "alfa": 23482, "âĺĺ": 23483, "unstoppable": 23484, "ecs": 23485, "gmo": 23486, "giftideas": 23487, "pathway": 23488, "fencing": 23489, "ðŁİ¤": 23490, "bham": 23491, "ras": 23492, "sko": 23493, "dled": 23494, "thelast": 23495, "magnum": 23496, "binary": 23497, "wilde": 23498, "wilder": 23499, "whati": 23500, "barbecue": 23501, "hism": 23502, "canoe": 23503, "kurdi": 23504, "elive": 23505, "advantages": 23506, "madame": 23507, "bier": 23508, "missing": 23509, "entertain": 23510, "airforce": 23511, "yama": 23512, "cis": 23513, "hashtags": 23514, "jis": 23515, "veil": 23516, "dreamy": 23517, "tense": 23518, "mayward": 23519, "chateau": 23520, "huntington": 23521, "âļĵ": 23522, "vall": 23523, "upon": 23524, "blouse": 23525, "dunes": 23526, "ðŁĺ“": 23527, "fertility": 23528, "mole": 23529, "currencies": 23530, "stu": 23531, "berlin": 23532, "toasted": 23533, "divas": 23534, "walt": 23535, "lark": 23536, "pora": 23537, "hitter": 23538, "umer": 23539, "chilled": 23540, "balancing": 23541, "fais": 23542, "yin": 23543, "ortiz": 23544, "eastenders": 23545, "hate": 23546, "ural": 23547, "april": 23548, "timel": 23549, "à±": 23550, "pero": 23551, "stocked": 23552, "respects": 23553, "tht": 23554, "bestfriends": 23555, "givingtuesday": 23556, "bead": 23557, "invent": 23558, "imi": 23559, "naples": 23560, "combining": 23561, "tokens": 23562, "thirst": 23563, "masc": 23564, "parrot": 23565, "spu": 23566, "denton": 23567, "*-*": 23568, "tres": 23569, "suburban": 23570, "width": 23571, "sive": 23572, "contender": 23573, "sirius": 23574, "lok": 23575, "troopers": 23576, "outrage": 23577, "turbo": 
23578, "fragile": 23579, "messed": 23580, "doh": 23581, "discord": 23582, "netanyahu": 23583, "resign": 23584, "forgiveness": 23585, "mohan": 23586, "munch": 23587, "camou": 23588, "identifying": 23589, "enabling": 23590, "hotter": 23591, "thornton": 23592, "jaipur": 23593, "arya": 23594, "ðŁı»âĢįâĻĢï¸ı": 23595, "mustaf": 23596, "majors": 23597, "oke": 23598, "duffy": 23599, "rohing": 23600, "tilt": 23601, "ðŁĩ®ðŁĩ³": 23602, "rockstar": 23603, "sheep": 23604, "hendrix": 23605, "rav": 23606, "invention": 23607, "dou": 23608, "laguna": 23609, "grumpy": 23610, "swis": 23611, "impe": 23612, ")'": 23613, "youths": 23614, "bunker": 23615, "stache": 23616, "oppose": 23617, "indies": 23618, "accelerate": 23619, "mlp": 23620, "eden": 23621, "wann": 23622, "kail": 23623, "akshaykumar": 23624, "supt": 23625, "polym": 23626, "middleton": 23627, "extraordin": 23628, "wilson": 23629, "australian": 23630, "aluminium": 23631, "wayne": 23632, "alumnus": 23633, "matics": 23634, "grim": 23635, "ernie": 23636, "oppa": 23637, "competitors": 23638, "randall": 23639, "hence": 23640, "declares": 23641, "preaching": 23642, "shahe": 23643, "cane": 23644, "sustainable": 23645, "staples": 23646, "ledge": 23647, "adena": 23648, "doctoral": 23649, "burgundy": 23650, "decorate": 23651, "rendered": 23652, "risen": 23653, "prank": 23654, "dior": 23655, "beethoven": 23656, "floor": 23657, "accom": 23658, "tot": 23659, "hodg": 23660, "tourism": 23661, "sayin": 23662, "objective": 23663, "markers": 23664, "premiership": 23665, "enabled": 23666, "camoufla": 23667, "giant": 23668, "Ñģ": 23669, "smokey": 23670, "ricket": 23671, "pang": 23672, "depending": 23673, "sation": 23674, "evolving": 23675, "intercep": 23676, "census": 23677, "tofthe": 23678, "reen": 23679, "mendoza": 23680, "trumpet": 23681, "marketers": 23682, "anit": 23683, "ðŁĻĬ": 23684, "northwestern": 23685, "vla": 23686, "fotogra": 23687, "blackandwhite": 23688, "chewan": 23689, "wig": 23690, "troom": 23691, "gingerbread": 23692, "kn": 
23693, "romero": 23694, "nfc": 23695, "orchi": 23696, "funko": 23697, "source": 23698, "fs": 23699, "raped": 23700, "ost": 23701, "tarot": 23702, "annually": 23703, "ðŁĺ¬": 23704, "rill": 23705, "delav": 23706, "..!!": 23707, "ses": 23708, "cann": 23709, "medicare": 23710, "phel": 23711, "apex": 23712, "guardian": 23713, "remained": 23714, "rpm": 23715, "añ": 23716, "storymonth": 23717, "instagood": 23718, "neighbour": 23719, "ping": 23720, "semite": 23721, "mystic": 23722, "ascot": 23723, "mater": 23724, "handful": 23725, "dangers": 23726, "tid": 23727, "anaheim": 23728, "opoly": 23729, "shallow": 23730, "namibia": 23731, "toria": 23732, "procurement": 23733, "bigbang": 23734, "announcements": 23735, "prosecutor": 23736, "bengals": 23737, "salle": 23738, "enroll": 23739, "gastro": 23740, "suggestion": 23741, "bak": 23742, "haul": 23743, "buddhism": 23744, "berniesanders": 23745, "flute": 23746, "fatigue": 23747, "cynthia": 23748, "choi": 23749, "irwin": 23750, "gua": 23751, "strous": 23752, "hp": 23753, "bap": 23754, "satisfying": 23755, "playa": 23756, "ðŁİ¼": 23757, "instap": 23758, "alice": 23759, "tp": 23760, "irrigation": 23761, "ðŁĩ¬ðŁĩ§": 23762, "intric": 23763, "clues": 23764, "plex": 23765, "sax": 23766, "hepat": 23767, "dumped": 23768, "significance": 23769, "byu": 23770, "medication": 23771, "prov": 23772, "toughest": 23773, "cornish": 23774, "âŀľ": 23775, "kelley": 23776, "uv": 23777, "sizz": 23778, "sibling": 23779, "mest": 23780, "distor": 23781, "diplomatic": 23782, "auntie": 23783, "bhat": 23784, "sonic": 23785, "brenda": 23786, "pumpkins": 23787, "roch": 23788, "blackburn": 23789, "urged": 23790, "shia": 23791, "arrangements": 23792, "flood": 23793, "saunders": 23794, "lecturer": 23795, "nouri": 23796, "populations": 23797, "diplomacy": 23798, "consistently": 23799, "ð٤Ļ": 23800, "tmund": 23801, "cauliflower": 23802, "lily": 23803, "vocabulary": 23804, "varieties": 23805, "cooker": 23806, "uptown": 23807, "quent": 23808, "mosa": 23809, "reinde": 
23810, "velocity": 23811, "spruce": 23812, "socialmedi": 23813, "iber": 23814, "voluntary": 23815, "processed": 23816, "baltic": 23817, "yang": 23818, "lebanese": 23819, "dp": 23820, "dolly": 23821, "arrangement": 23822, "yuri": 23823, "cranberry": 23824, "kalyan": 23825, "elevation": 23826, "cliff": 23827, "pushes": 23828, "ìĬ¤": 23829, "silic": 23830, "cowx": 23831, "eternity": 23832, "slaves": 23833, "vinegar": 23834, "gloucester": 23835, "contained": 23836, "breakingnews": 23837, "against": 23838, "renovated": 23839, "normandy": 23840, "heroin": 23841, "ysm": 23842, "mods": 23843, "greek": 23844, "undi": 23845, "trench": 23846, "vh": 23847, "encourages": 23848, "headache": 23849, "grange": 23850, ":'": 23851, "evergreen": 23852, "ƙĬ": 23853, "reckon": 23854, "abused": 23855, "thru": 23856, "choice": 23857, "tidy": 23858, "colder": 23859, "schoice": 23860, "hain": 23861, "brum": 23862, "liars": 23863, "breit": 23864, "yorker": 23865, "shack": 23866, "heidi": 23867, "michaels": 23868, "scopic": 23869, "fascist": 23870, "playful": 23871, "cac": 23872, "yasss": 23873, "shad": 23874, "..?": 23875, "quen": 23876, "ramirez": 23877, "clifton": 23878, "prs": 23879, "bestfan": 23880, "âģł": 23881, "generating": 23882, "headset": 23883, "disappointment": 23884, "abstract": 23885, "boiled": 23886, "parenthood": 23887, "azerbaijan": 23888, "exhibiting": 23889, "bombay": 23890, "olivier": 23891, "koso": 23892, "unlea": 23893, "maternity": 23894, "izer": 23895, "sives": 23896, "rhu": 23897, "coll": 23898, "saskatchewan": 23899, "freakin": 23900, "dek": 23901, "nag": 23902, "stabili": 23903, "ðŁįķ": 23904, "organizer": 23905, "bosses": 23906, "aru": 23907, "uva": 23908, "atable": 23909, "taun": 23910, "afterwards": 23911, "fertili": 23912, "verge": 23913, "azi": 23914, "morph": 23915, "à¹ģà¸": 23916, "jerk": 23917, "cosmetic": 23918, "kow": 23919, "strust": 23920, "apache": 23921, "postcards": 23922, "formul": 23923, "Ƭĭ": 23924, "spinal": 23925, "jackpot": 23926, "electri": 
23927, "ÃŃ": 23928, "loy": 23929, "grader": 23930, "diablo": 23931, "ardi": 23932, "hesit": 23933, "fw": 23934, "archery": 23935, "pash": 23936, "theories": 23937, "repeal": 23938, "relive": 23939, "percy": 23940, "âĺĨ": 23941, "imin": 23942, "synchron": 23943, "shampoo": 23944, "coupons": 23945, "oto": 23946, "lai": 23947, "thought": 23948, "luxembourg": 23949, "mov": 23950, "ðŁĺ„": 23951, "gemma": 23952, "seated": 23953, "mga": 23954, "stratford": 23955, "uncertainty": 23956, "shifts": 23957, "esto": 23958, "fool": 23959, "firearms": 23960, "corrie": 23961, "kiki": 23962, "apparent": 23963, "pills": 23964, "olympia": 23965, "fid": 23966, "elevated": 23967, "decks": 23968, "ignoring": 23969, "avalan": 23970, "rov": 23971, "whistle": 23972, "ptsd": 23973, "militants": 23974, "robotic": 23975, "pacers": 23976, "quilt": 23977, "bankruptcy": 23978, "lich": 23979, "percussion": 23980, "celebrity": 23981, "als": 23982, "(;": 23983, "sut": 23984, "pokemongo": 23985, "hg": 23986, "offs": 23987, "gibraltar": 23988, "screams": 23989, "billie": 23990, "genome": 23991, "marin": 23992, "beams": 23993, "archbishop": 23994, "emin": 23995, "bedrooms": 23996, "gated": 23997, "olly": 23998, "warranty": 23999, "atown": 24000, "cuddles": 24001, "gunna": 24002, "kic": 24003, "vive": 24004, "cymru": 24005, "narrow": 24006, "prob": 24007, "leo": 24008, "references": 24009, "manufactured": 24010, "chopper": 24011, "brunswick": 24012, "semis": 24013, "donia": 24014, "rye": 24015, "mano": 24016, "hurting": 24017, "?#": 24018, "holli": 24019, "investigations": 24020, "cels": 24021, "ðŁĵŀ": 24022, "lester": 24023, "temples": 24024, "storey": 24025, "mcmahon": 24026, "toilets": 24027, "woof": 24028, "ï¸İ": 24029, "leverage": 24030, "atom": 24031, "nightmares": 24032, "victorious": 24033, "haunting": 24034, "customer": 24035, "agi": 24036, "yoongi": 24037, "monty": 24038, "veronica": 24039, "wur": 24040, "intimid": 24041, "blankets": 24042, "volution": 24043, "jm": 24044, "âĺİ": 24045, "amon": 
24046, "judith": 24047, "ðŁĺİðŁĺİ": 24048, "distracted": 24049, "drip": 24050, "hurricane": 24051, "andes": 24052, "revelation": 24053, "troop": 24054, "ableg": 24055, "collin": 24056, "tibetan": 24057, "worrying": 24058, "internationally": 24059, "eater": 24060, "cameroon": 24061, "brador": 24062, "yuk": 24063, "ðŁēĹðŁēĹ": 24064, "trak": 24065, "slopes": 24066, "cier": 24067, "nea": 24068, "oler": 24069, "taka": 24070, "albion": 24071, "volcanic": 24072, "amn": 24073, "afi": 24074, "obstac": 24075, "facetime": 24076, "gering": 24077, "npr": 24078, "metallica": 24079, "organic": 24080, "ðŁē”": 24081, "kidd": 24082, "dances": 24083, "pembro": 24084, "washer": 24085, "mits": 24086, "omer": 24087, "emotionally": 24088, "tango": 24089, "ipo": 24090, "docks": 24091, "scanning": 24092, "specs": 24093, "thom": 24094, "theology": 24095, "emergen": 24096, "omi": 24097, "gpa": 24098, "selections": 24099, "unnecessary": 24100, "image": 24101, "ters": 24102, "induced": 24103, "gigan": 24104, "rentals": 24105, "supplied": 24106, "mfa": 24107, "shankar": 24108, "later": 24109, "pajam": 24110, "clave": 24111, "Ùģ": 24112, "mahin": 24113, "carlson": 24114, "avian": 24115, "anova": 24116, "katie": 24117, "ajith": 24118, "designated": 24119, "chocolates": 24120, "investigators": 24121, "glazed": 24122, "princess": 24123, "erry": 24124, "ragn": 24125, "ourable": 24126, "hru": 24127, "sundance": 24128, "peugeot": 24129, "steampunk": 24130, "ghlin": 24131, "grease": 24132, "hires": 24133, "zap": 24134, "perce": 24135, "jill": 24136, "tome": 24137, "hehehe": 24138, "joyful": 24139, "maestro": 24140, "nished": 24141, "genealo": 24142, "vich": 24143, "pits": 24144, "foxes": 24145, "goodman": 24146, "emerson": 24147, "lobes": 24148, "converse": 24149, "oats": 24150, "thomson": 24151, "rahim": 24152, "malware": 24153, "ahi": 24154, "mankind": 24155, "resin": 24156, "img": 24157, "swood": 24158, "kinder": 24159, "scroll": 24160, "ara": 24161, "sakura": 24162, "robbed": 24163, "xion": 24164, 
"nya": 24165, "cism": 24166, "cedar": 24167, "bein": 24168, "mourning": 24169, "torto": 24170, "heathrow": 24171, "donegal": 24172, "barb": 24173, "hydration": 24174, "kor": 24175, "elimination": 24176, "supdates": 24177, "hills": 24178, "appeti": 24179, "starred": 24180, "kom": 24181, "gwen": 24182, "ddd": 24183, "cray": 24184, "scanner": 24185, "personalised": 24186, "serenity": 24187, "redesign": 24188, "metaph": 24189, "boxed": 24190, "judgment": 24191, "nose": 24192, "ë¹": 24193, "erad": 24194, "acne": 24195, "suppliers": 24196, "energetic": 24197, "vom": 24198, "asap": 24199, "ðŁĶ¸": 24200, "irvine": 24201, "hatch": 24202, "lass": 24203, "adren": 24204, "waffles": 24205, "accurately": 24206, "icio": 24207, "ittle": 24208, "seun": 24209, "occupy": 24210, "webcam": 24211, "thenew": 24212, "entes": 24213, "gai": 24214, "jw": 24215, "accountable": 24216, "visor": 24217, "irrit": 24218, "licensing": 24219, "huddersfield": 24220, "genie": 24221, "ðŁİ¾": 24222, "atmospheric": 24223, "tensions": 24224, "spartan": 24225, "clifford": 24226, "olan": 24227, "northbound": 24228, "ameen": 24229, "censor": 24230, "uel": 24231, "stery": 24232, "$$": 24233, "farrell": 24234, "hyster": 24235, "clt": 24236, "sedan": 24237, "replied": 24238, "describing": 24239, "microwave": 24240, "slab": 24241, "prosp": 24242, "assisting": 24243, "rubio": 24244, "ethan": 24245, "hhhhh": 24246, "guay": 24247, "zman": 24248, "raise": 24249, "rolling": 24250, "oe": 24251, "nile": 24252, "ambrose": 24253, "scarborough": 24254, "heroic": 24255, "cooks": 24256, "mort": 24257, "chopra": 24258, "ðŁĮ·": 24259, "tob": 24260, "shaving": 24261, "stacey": 24262, "dorm": 24263, "motorsports": 24264, "wiki": 24265, "folds": 24266, "spiced": 24267, "stressful": 24268, "literal": 24269, "fudge": 24270, "peggy": 24271, "waite": 24272, "tresses": 24273, "sesh": 24274, "pric": 24275, "ðŁİħ": 24276, "fright": 24277, "rva": 24278, "mumbai": 24279, "pom": 24280, "ttv": 24281, "cellar": 24282, "tome": 24283, 
"android": 24284, "doris": 24285, "tsunami": 24286, "tinder": 24287, "oec": 24288, "mwc": 24289, "dortmund": 24290, "nothin": 24291, "liti": 24292, "sou": 24293, "believein": 24294, "atu": 24295, "knocks": 24296, "magni": 24297, "sssss": 24298, "rohit": 24299, "inews": 24300, "angi": 24301, "mandy": 24302, "kettle": 24303, "intermediate": 24304, "avant": 24305, "curl": 24306, "endorsed": 24307, "orio": 24308, "urt": 24309, "consideration": 24310, "wires": 24311, "shelters": 24312, "bino": 24313, "vikram": 24314, "implemented": 24315, "lydia": 24316, "buk": 24317, "parody": 24318, "cnews": 24319, "undergraduate": 24320, "canucks": 24321, "sami": 24322, "politically": 24323, "rotten": 24324, "ghz": 24325, "textiles": 24326, "overload": 24327, "moderni": 24328, "recreational": 24329, "flir": 24330, "baton": 24331, "typography": 24332, "ovation": 24333, "intriguing": 24334, "pilgrimage": 24335, "alge": 24336, "adays": 24337, "tcmparty": 24338, "spelled": 24339, "curls": 24340, "booze": 24341, "stem": 24342, "annes": 24343, "irls": 24344, "sponge": 24345, "shopper": 24346, "signation": 24347, "brass": 24348, "mistress": 24349, "leah": 24350, "beginner": 24351, "lauderdale": 24352, "august": 24353, "preschool": 24354, "taping": 24355, "taipei": 24356, "executives": 24357, "bd": 24358, "rhetor": 24359, "escor": 24360, "immuno": 24361, "deeplearning": 24362, "statues": 24363, "itus": 24364, "manuscript": 24365, "lyric": 24366, "corvette": 24367, "molly": 24368, "lage": 24369, "dep": 24370, "cnbc": 24371, "lest": 24372, "jessi": 24373, "fife": 24374, "griffith": 24375, "opposing": 24376, "rang": 24377, "drills": 24378, "respectful": 24379, "pity": 24380, "dell": 24381, "harding": 24382, "playboy": 24383, "bloke": 24384, "shutout": 24385, "kili": 24386, "osp": 24387, "seattle": 24388, "bcpoli": 24389, "mises": 24390, "journals": 24391, "teaming": 24392, "esther": 24393, "freddy": 24394, "Ķï¸ı": 24395, "metrics": 24396, "notre": 24397, "garry": 24398, "forty": 24399, 
"navigate": 24400, "periods": 24401, "benedic": 24402, "jid": 24403, "daw": 24404, "ancestors": 24405, "restoring": 24406, "cong": 24407, "allergy": 24408, "titanium": 24409, "cence": 24410, "leaning": 24411, "abbas": 24412, "vast": 24413, "ucf": 24414, "roofing": 24415, "eman": 24416, "severely": 24417, "vogue": 24418, "veau": 24419, "inbound": 24420, "dz": 24421, "taneously": 24422, "stretching": 24423, "manchester": 24424, "dryer": 24425, "davis": 24426, "kanth": 24427, "thegame": 24428, "itted": 24429, "retain": 24430, "elles": 24431, "congestion": 24432, "fraternity": 24433, "ollie": 24434, "loki": 24435, "freely": 24436, "choo": 24437, "pony": 24438, "scep": 24439, "tably": 24440, "balt": 24441, "rockn": 24442, "dime": 24443, "logging": 24444, "ðŁį·": 24445, "adu": 24446, "havoc": 24447, "waterford": 24448, "charis": 24449, "sweetie": 24450, "running": 24451, "nerd": 24452, "erdogan": 24453, "zara": 24454, "weighing": 24455, "fifty": 24456, "precise": 24457, "lowell": 24458, "kurdistan": 24459, "ryo": 24460, "orth": 24461, "synth": 24462, "liners": 24463, "phenomenon": 24464, "artillery": 24465, "illegally": 24466, "construct": 24467, "nostalgic": 24468, "garth": 24469, "alta": 24470, "shelton": 24471, "asean": 24472, "wander": 24473, "durban": 24474, "diversi": 24475, "bono": 24476, "clon": 24477, "leman": 24478, "shun": 24479, "obstacles": 24480, "appetite": 24481, "feeder": 24482, "respiratory": 24483, "dixie": 24484, "formula": 24485, "anto": 24486, "sober": 24487, "extinct": 24488, "auc": 24489, "ingles": 24490, "legitimate": 24491, ";;": 24492, "minnie": 24493, "ipswich": 24494, "dramatically": 24495, "ðŁijıðŁı¼": 24496, "ingham": 24497, "military": 24498, "monet": 24499, "usnavy": 24500, "fork": 24501, "dunno": 24502, "player": 24503, "qotd": 24504, "stoo": 24505, "exor": 24506, "ethiopian": 24507, "filmfest": 24508, "pered": 24509, "cate": 24510, "saudi": 24511, "inner": 24512, "sincere": 24513, "tionality": 24514, "alee": 24515, "deeds": 24516, 
"cooperative": 24517, "ironic": 24518, "crocod": 24519, "brary": 24520, "postseason": 24521, "camper": 24522, "canary": 24523, "ein": 24524, "extensions": 24525, "nbd": 24526, "sherwood": 24527, "spokane": 24528, "hump": 24529, "jitsu": 24530, "ê¹": 24531, "daryl": 24532, "psi": 24533, "stabbed": 24534, "offerings": 24535, "expects": 24536, "caval": 24537, "bodybuilding": 24538, "framing": 24539, "fca": 24540, "yearly": 24541, "bombed": 24542, "skil": 24543, "researching": 24544, "judiciary": 24545, "greeted": 24546, "tudor": 24547, "milo": 24548, "innovate": 24549, "ðŁĺĽ": 24550, "rhs": 24551, "ruby": 24552, "contributor": 24553, "famer": 24554, "socially": 24555, "mlin": 24556, "fiery": 24557, "utter": 24558, "beaut": 24559, "itos": 24560, "devoted": 24561, "rainbow": 24562, "barney": 24563, "peren": 24564, "arjun": 24565, "rna": 24566, "gabby": 24567, "uti": 24568, "hannity": 24569, "pickle": 24570, "serv": 24571, "quakes": 24572, "ppe": 24573, "fem": 24574, "whitec": 24575, "jn": 24576, "victories": 24577, "ðŁ§”": 24578, "golfer": 24579, "congratulates": 24580, "resulting": 24581, "mechanic": 24582, "urve": 24583, "centered": 24584, "kiev": 24585, "ans": 24586, "incub": 24587, "<<": 24588, "cmo": 24589, "bestfanarmy": 24590, "daph": 24591, "enham": 24592, "oncology": 24593, "kush": 24594, "txt": 24595, "oriented": 24596, "fashionable": 24597, "csr": 24598, "sahara": 24599, "rack": 24600, "pdp": 24601, "hanson": 24602, "Ć ĀøÄ©": 24603, "tiers": 24604, "rar": 24605, "panam": 24606, "insky": 24607, "sahi": 24608, "testament": 24609, "asthma": 24610, "inher": 24611, "fisheries": 24612, "order": 24613, "howe": 24614, "gallon": 24615, "epis": 24616, "suzanne": 24617, "drowning": 24618, "panelists": 24619, "ðŁĺ²": 24620, "ë¦": 24621, "alach": 24622, "commemorative": 24623, "attribu": 24624, "ðŁij»": 24625, "moo": 24626, "visional": 24627, "weeksary": 24628, "gust": 24629, "akin": 24630, "pointe": 24631, "eee": 24632, "dispar": 24633, "nipp": 24634, "dental": 24635, 
"stall": 24636, "pian": 24637, "bore": 24638, "ulster": 24639, "tick": 24640, "irr": 24641, "taehyung": 24642, "microphone": 24643, "bermuda": 24644, "gaard": 24645, "eler": 24646, "plumbing": 24647, "hugely": 24648, "âļ«ï¸ı": 24649, "raceway": 24650, "cambridge": 24651, "marcel": 24652, "burnley": 24653, "toast": 24654, "hollywood": 24655, "fasting": 24656, "mered": 24657, "hibition": 24658, "capped": 24659, "beneficial": 24660, "owning": 24661, "contamin": 24662, "arabian": 24663, "toon": 24664, "capac": 24665, "hulu": 24666, "smir": 24667, "nutrients": 24668, "sein": 24669, "graphs": 24670, "conditional": 24671, "ðŁijħ": 24672, "orac": 24673, "playin": 24674, "northe": 24675, "tornad": 24676, "marian": 24677, "jumbo": 24678, "lexi": 24679, "incredibleindia": 24680, "roadto": 24681, "ukone": 24682, "confusing": 24683, "sph": 24684, "shank": 24685, "pied": 24686, "mqm": 24687, "positively": 24688, "sherry": 24689, "pathways": 24690, "considers": 24691, "tofu": 24692, "arguments": 24693, "resilient": 24694, "chett": 24695, "withdra": 24696, "tero": 24697, "atedly": 24698, "swana": 24699, "heb": 24700, "flight": 24701, "harley": 24702, "decrease": 24703, "kindle": 24704, "bookshop": 24705, "³ï¸ı": 24706, "martyrs": 24707, "smur": 24708, "mccl": 24709, "concerto": 24710, "stime": 24711, "rejoice": 24712, "applau": 24713, "clement": 24714, "merkel": 24715, "jaime": 24716, "immortal": 24717, "isleof": 24718, "marco": 24719, "youtuber": 24720, "stalking": 24721, "metoo": 24722, "stack": 24723, "spouse": 24724, "ust": 24725, "luv": 24726, "âļ¾ï¸ı": 24727, "equestrian": 24728, "eving": 24729, "flin": 24730, "nickname": 24731, "thebig": 24732, "asar": 24733, "stacks": 24734, "walker": 24735, "bora": 24736, "kidnapped": 24737, "hurling": 24738, "humbold": 24739, "recalls": 24740, "copper": 24741, "annis": 24742, "seo": 24743, "merger": 24744, "muir": 24745, "addy": 24746, "ðŁēªðŁēª": 24747, "bex": 24748, "cracy": 24749, "conan": 24750, "congratulation": 24751, "midst": 
24752, "âϬ": 24753, "forbi": 24754, "optic": 24755, "crate": 24756, "crocodile": 24757, "madagas": 24758, "securing": 24759, "aston": 24760, "ogue": 24761, "savior": 24762, "salisbury": 24763, "loveit": 24764, "fujifilm": 24765, "castles": 24766, "asst": 24767, "arrows": 24768, "spacious": 24769, "trs": 24770, "polyvore": 24771, "progression": 24772, "mri": 24773, "nelson": 24774, "bim": 24775, "indicator": 24776, "oda": 24777, "pepe": 24778, "resignation": 24779, "gut": 24780, "sneaker": 24781, "logically": 24782, "azy": 24783, "arella": 24784, "tearing": 24785, "joshi": 24786, "ssionism": 24787, "qpr": 24788, "mariah": 24789, "px": 24790, "bleed": 24791, "mian": 24792, "medley": 24793, "weiss": 24794, "kerry": 24795, "gatory": 24796, "atal": 24797, "madison": 24798, "avenger": 24799, "naby": 24800, "pland": 24801, "giles": 24802, "freshwater": 24803, "dington": 24804, "taj": 24805, "demonstrates": 24806, "ntv": 24807, "bulbs": 24808, "sundaymorning": 24809, "peake": 24810, "souvenir": 24811, "wah": 24812, "tonnes": 24813, "mkt": 24814, "complexity": 24815, "conden": 24816, "rossi": 24817, "bing": 24818, "yds": 24819, "suk": 24820, "ngo": 24821, "midland": 24822, "oly": 24823, "lifeis": 24824, "ripple": 24825, "moreno": 24826, "dders": 24827, "tus": 24828, "ƔĄ": 24829, "boul": 24830, "xa": 24831, "holdings": 24832, "wny": 24833, "shadowhunters": 24834, "kei": 24835, "aspire": 24836, "mous": 24837, "owen": 24838, "soak": 24839, "skirts": 24840, "mountaine": 24841, "storming": 24842, "chrome": 24843, "riots": 24844, "sarato": 24845, "amaze": 24846, "lessness": 24847, "navar": 24848, "criteria": 24849, "rafa": 24850, "indulge": 24851, "ayer": 24852, "porto": 24853, "namo": 24854, "................": 24855, "yields": 24856, "valle": 24857, "jh": 24858, "macron": 24859, "sains": 24860, "durant": 24861, "trailers": 24862, "wot": 24863, "confederate": 24864, "shrin": 24865, "idol": 24866, "formally": 24867, "tene": 24868, "motorcycles": 24869, "thang": 24870, "node": 
24871, "banger": 24872, "daly": 24873, "pats": 24874, "enrollment": 24875, "auctions": 24876, "atal": 24877, "arbor": 24878, "logos": 24879, "dearest": 24880, "transaction": 24881, "domingo": 24882, "flea": 24883, "sermon": 24884, "deck": 24885, "sincere": 24886, "questioning": 24887, "julio": 24888, "wasp": 24889, "pretz": 24890, "armenian": 24891, "kham": 24892, "inflammation": 24893, "picturesque": 24894, "accidental": 24895, "filmmakers": 24896, "ðŁĺļ": 24897, "ðŁēį": 24898, "casey": 24899, "sob": 24900, "yeezy": 24901, "goodwill": 24902, "paragra": 24903, "ssly": 24904, "feather": 24905, "dyed": 24906, "assassination": 24907, "nade": 24908, "bcs": 24909, "applies": 24910, "feminine": 24911, "feu": 24912, "extent": 24913, "deputies": 24914, "lack": 24915, "psychic": 24916, "goi": 24917, "killings": 24918, "pseu": 24919, "ðŁ¤ª": 24920, "unc": 24921, "marl": 24922, "tane": 24923, "mckenna": 24924, "surfer": 24925, "influences": 24926, "freeway": 24927, "hackney": 24928, "malaria": 24929, "eland": 24930, "teau": 24931, "remastered": 24932, "ر": 24933, "razor": 24934, "ggy": 24935, "corro": 24936, "laksh": 24937, "flair": 24938, "honesty": 24939, "hooray": 24940, "depp": 24941, "amc": 24942, "wednesdays": 24943, "qa": 24944, "edits": 24945, "-$": 24946, "sevilla": 24947, "doubled": 24948, "humanities": 24949, "ccot": 24950, "somos": 24951, "rine": 24952, "afa": 24953, "sioux": 24954, "reconstruction": 24955, "welding": 24956, "threads": 24957, "amish": 24958, "encouragement": 24959, "poder": 24960, "bock": 24961, "balm": 24962, "ptions": 24963, "standup": 24964, "accomplishments": 24965, "guarding": 24966, "conviction": 24967, "acion": 24968, "napoleon": 24969, "depicting": 24970, "attack": 24971, "sui": 24972, "wearable": 24973, "âĸªï¸ı": 24974, "potter": 24975, "escort": 24976, "vise": 24977, "tots": 24978, "boon": 24979, "eventprofs": 24980, "angular": 24981, "womenshistorymonth": 24982, "barrow": 24983, "schi": 24984, "accomp": 24985, "tik": 24986, "lend": 
24987, "kensington": 24988, "wolfe": 24989, "stacked": 24990, "crashing": 24991, "exhibit": 24992, "winged": 24993, "sabrina": 24994, "masa": 24995, "kms": 24996, "always": 24997, "ett": 24998, "plasma": 24999, "counseling": 25000, "pickles": 25001, "nfldraft": 25002, "mrs": 25003, "inevitable": 25004, "courageous": 25005, "stafford": 25006, "writerslife": 25007, "hos": 25008, "ej": 25009, "ghyun": 25010, "trademark": 25011, "adrian": 25012, "influencer": 25013, "coronation": 25014, "raging": 25015, "explored": 25016, "usaf": 25017, "exception": 25018, "eux": 25019, "tanker": 25020, "swami": 25021, "packet": 25022, "ðŁij¨âĢį": 25023, "fen": 25024, "sheen": 25025, "aero": 25026, "jl": 25027, "regal": 25028, "nwt": 25029, "auster": 25030, "mehta": 25031, "charge": 25032, "aste": 25033, "bate": 25034, "infeld": 25035, "racecourse": 25036, "collapsed": 25037, "fleece": 25038, "zil": 25039, "allie": 25040, "alternatives": 25041, "georges": 25042, "ðŁĵį": 25043, "quirky": 25044, "fcb": 25045, "natgeo": 25046, "philanthropy": 25047, "brai": 25048, "everyday": 25049, "ðŁIJ°": 25050, "achers": 25051, "jaan": 25052, "fines": 25053, "qi": 25054, "fisherman": 25055, "distinct": 25056, "grimes": 25057, "nationalist": 25058, "commence": 25059, "rown": 25060, "â̳": 25061, "zing": 25062, "fter": 25063, "hrw": 25064, "baroque": 25065, "blender": 25066, "kitty": 25067, "hooks": 25068, "cited": 25069, "wanda": 25070, "consensus": 25071, "reindeer": 25072, "anand": 25073, "supply": 25074, "meds": 25075, "vn": 25076, "olph": 25077, "ratchet": 25078, "sheldon": 25079, "securities": 25080, "ë°©íĄ": 25081, "crom": 25082, "mosquito": 25083, "jeric": 25084, "immac": 25085, "dimensions": 25086, "â¤": 25087, "dissi": 25088, "spongebob": 25089, "damien": 25090, "stevenson": 25091, "joanne": 25092, "delish": 25093, "yikes": 25094, "thanx": 25095, "surveys": 25096, "postponed": 25097, "alcoholic": 25098, "alised": 25099, "ðŁĻıðŁı»": 25100, "doch": 25101, "sentim": 25102, "meredith": 25103, 
"compares": 25104, "bago": 25105, "happydays": 25106, "moss": 25107, "ãħĭ": 25108, "nec": 25109, "gnment": 25110, "frustrated": 25111, "combin": 25112, "riv": 25113, "eclec": 25114, "collo": 25115, "compliment": 25116, "actorslife": 25117, "ctto": 25118, "nicar": 25119, "ophon": 25120, "aparthe": 25121, "mant": 25122, "jade": 25123, "trolley": 25124, "optimization": 25125, "eyeon": 25126, "ecological": 25127, "quist": 25128, "ephe": 25129, "à„ĩ": 25130, "cinco": 25131, "appoints": 25132, "oldschool": 25133, "cpr": 25134, "behavioral": 25135, "minaj": 25136, ":-(": 25137, "tagging": 25138, "eval": 25139, "joaqu": 25140, "ðŁĺ«": 25141, "hak": 25142, "deme": 25143, "jamaican": 25144, "sos": 25145, "hyatt": 25146, "handbook": 25147, "librarian": 25148, "hannibal": 25149, "pumping": 25150, "chom": 25151, "fman": 25152, "gai": 25153, "hull": 25154, "responders": 25155, "greenville": 25156, "nus": 25157, "vaugh": 25158, "ðŁİīðŁİī": 25159, "taxi": 25160, "goldberg": 25161, "mantra": 25162, "tease": 25163, "forbidden": 25164, "methodist": 25165, "ativity": 25166, "****": 25167, "ect": 25168, "mcgr": 25169, "Ħƫĭ": 25170, "seb": 25171, "amidst": 25172, "disappear": 25173, "thyro": 25174, "philips": 25175, "erina": 25176, "vicious": 25177, "streamer": 25178, "millionaire": 25179, "map": 25180, "strick": 25181, "hackathon": 25182, "gha": 25183, "edic": 25184, "mika": 25185, "peck": 25186, "illi": 25187, "antoine": 25188, "arca": 25189, "optic": 25190, "maure": 25191, "ðŁĩ¦ðŁĩº": 25192, "clashes": 25193, "manly": 25194, "âĺģ": 25195, "alvar": 25196, "andres": 25197, "mei": 25198, "elm": 25199, "wwww": 25200, "altered": 25201, "lte": 25202, "ê¹Ģ": 25203, "mojo": 25204, "forrest": 25205, "thalai": 25206, "nont": 25207, "speeches": 25208, "acknowledge": 25209, "ignite": 25210, "xfactor": 25211, "ðŁ„Ĥ": 25212, "meadow": 25213, "disrupt": 25214, "debuted": 25215, "scrimmage": 25216, "pharmaceutical": 25217, "fidd": 25218, "foundations": 25219, "philosopher": 25220, "etal": 25221, 
"publishers": 25222, "boys": 25223, "cke": 25224, "rugged": 25225, "optimism": 25226, "rebe": 25227, "philharmon": 25228, "narcis": 25229, "rallies": 25230, "luis": 25231, "goblue": 25232, "folded": 25233, "unacceptable": 25234, "optimal": 25235, "lisa": 25236, "polaro": 25237, "+.": 25238, "enza": 25239, "âĿ£ï¸ı": 25240, "monopoly": 25241, "graceful": 25242, "dairy": 25243, "dua": 25244, "difficulty": 25245, "judgement": 25246, "osi": 25247, "mersey": 25248, "flux": 25249, "newfound": 25250, "terns": 25251, "dimensional": 25252, "invic": 25253, "alba": 25254, "amit": 25255, "abudhabi": 25256, "algeria": 25257, "automobile": 25258, "thead": 25259, "lotion": 25260, "accelerator": 25261, "vacant": 25262, "ition": 25263, "luf": 25264, "alic": 25265, "pll": 25266, "blazing": 25267, "baz": 25268, "sene": 25269, "ðŁij¼": 25270, "villains": 25271, "directory": 25272, "eisen": 25273, "tock": 25274, "brochure": 25275, "ripp": 25276, "hbd": 25277, "zaynmalik": 25278, "niche": 25279, "lolol": 25280, "certificates": 25281, "morse": 25282, "facup": 25283, "xham": 25284, "unwanted": 25285, "imports": 25286, "carnegie": 25287, "fansign": 25288, "mou": 25289, "ralph": 25290, "destroyer": 25291, "swing": 25292, "trekking": 25293, "ciliation": 25294, "pitbull": 25295, "gaps": 25296, "howell": 25297, "definitive": 25298, "mcle": 25299, "fps": 25300, "etz": 25301, "bolly": 25302, "lynn": 25303, "gano": 25304, "ature": 25305, "fursuit": 25306, "coil": 25307, "nav": 25308, "butts": 25309, "trojans": 25310, "eure": 25311, "enko": 25312, "schumer": 25313, "horrific": 25314, "installment": 25315, "brb": 25316, "suburbs": 25317, "abel": 25318, "vir": 25319, "desh": 25320, "cunningham": 25321, "ðŁIJ»": 25322, "spann": 25323, "schwe": 25324, "kemp": 25325, "tru": 25326, "stealth": 25327, "ques": 25328, "lew": 25329, "delights": 25330, "koch": 25331, "humili": 25332, "criti": 25333, "ilt": 25334, "spells": 25335, "miley": 25336, "caric": 25337, "ðŁį“": 25338, "lcfc": 25339, "substitute": 
25340, "oung": 25341, "?!!": 25342, "affir": 25343, "predictable": 25344, "classof": 25345, "err": 25346, "cypress": 25347, "chandra": 25348, "ageing": 25349, "____": 25350, "therland": 25351, "doncaster": 25352, "elin": 25353, "yoshi": 25354, "sailors": 25355, "harris": 25356, "joanna": 25357, "nigerians": 25358, "hers": 25359, "plague": 25360, "procra": 25361, "kno": 25362, "canton": 25363, "busines": 25364, "unh": 25365, "prakash": 25366, "cin": 25367, "bowen": 25368, "coating": 25369, "mals": 25370, "begging": 25371, "smithson": 25372, "pontiac": 25373, "spies": 25374, "damian": 25375, "pline": 25376, "undant": 25377, "alta": 25378, "oness": 25379, "shameless": 25380, "daq": 25381, "bbm": 25382, "wales": 25383, "stampede": 25384, "serum": 25385, "ÙĨ": 25386, "catalyst": 25387, "xn": 25388, "absc": 25389, "freezer": 25390, "chun": 25391, "arios": 25392, "mccre": 25393, "forehead": 25394, "hears": 25395, "damascus": 25396, "tacoma": 25397, "arduino": 25398, "encounters": 25399, "stanton": 25400, "lgb": 25401, "abas": 25402, "\"..": 25403, "kete": 25404, "dracula": 25405, "elem": 25406, "gne": 25407, "zeppelin": 25408, "labrador": 25409, "pulp": 25410, "optional": 25411, "orn": 25412, "russians": 25413, "sanitation": 25414, "hilary": 25415, "etsymntt": 25416, "penalties": 25417, "aust": 25418, "igans": 25419, "olympian": 25420, "medicaid": 25421, "versace": 25422, "vape": 25423, "restra": 25424, "peep": 25425, "sexiest": 25426, "stalls": 25427, "dile": 25428, "thea": 25429, "punjabi": 25430, "puppy": 25431, "tuesdaymotivation": 25432, "ðŁĵļ": 25433, "theflash": 25434, "rocket": 25435, "modest": 25436, "chihuahu": 25437, "onna": 25438, "ksa": 25439, "hurdles": 25440, "cave": 25441, "failures": 25442, "split": 25443, "boho": 25444, "gurl": 25445, "disappoint": 25446, "howard": 25447, "nugget": 25448, "franz": 25449, "stalert": 25450, "kazakh": 25451, "forgetting": 25452, "schri": 25453, "agate": 25454, "amat": 25455, "everett": 25456, "duet": 25457, "veterinary": 
25458, "julian": 25459, "chills": 25460, "brave": 25461, "ghostbusters": 25462, "lando": 25463, "greets": 25464, "profitable": 25465, "dé": 25466, "tir": 25467, "zee": 25468, "omen": 25469, "pdx": 25470, "grayson": 25471, "hari": 25472, "fixes": 25473, "stabbing": 25474, "swimmer": 25475, "symbols": 25476, "compliments": 25477, "pose": 25478, "functioning": 25479, "thnx": 25480, "gir": 25481, "corporations": 25482, "barlow": 25483, "loe": 25484, "offseason": 25485, "distinctive": 25486, "marvelous": 25487, "nikon": 25488, "enrique": 25489, "kyu": 25490, "jaws": 25491, "amoto": 25492, "lombar": 25493, "travelblogger": 25494, "fah": 25495, "ourism": 25496, "tristan": 25497, "soe": 25498, "cease": 25499, "ðŁıħ": 25500, "zac": 25501, "mckenzie": 25502, "taxpayers": 25503, "swimsuit": 25504, "blo": 25505, "lesley": 25506, "kansas": 25507, "wks": 25508, "kiel": 25509, "provoking": 25510, "myles": 25511, "string": 25512, "kangaroo": 25513, "galactic": 25514, "fifth": 25515, "ske": 25516, "weir": 25517, "llis": 25518, "matory": 25519, "ðŁĩ¿": 25520, "unci": 25521, "reproductive": 25522, "rooting": 25523, "tides": 25524, "gadget": 25525, "..........": 25526, "alexander": 25527, "bowler": 25528, "screw": 25529, "apolog": 25530, "erika": 25531, "walters": 25532, "shetty": 25533, "lane": 25534, "banter": 25535, "asant": 25536, "meso": 25537, "vain": 25538, "\"\"\"": 25539, "usi": 25540, "ferdin": 25541, "accomplish": 25542, "mansfield": 25543, "bombar": 25544, "collaborating": 25545, "clap": 25546, "iture": 25547, "sda": 25548, "smoky": 25549, "nak": 25550, "imperson": 25551, "carla": 25552, "comra": 25553, "burgl": 25554, "loco": 25555, "ties": 25556, "inhi": 25557, "tracey": 25558, "seis": 25559, "disser": 25560, "rrrr": 25561, "dray": 25562, "protect": 25563, "corona": 25564, "hunger": 25565, "cken": 25566, "celi": 25567, "troubled": 25568, "predators": 25569, "fictional": 25570, "shaved": 25571, "richest": 25572, "metaboli": 25573, "fulham": 25574, "grooming": 25575, 
"monochrome": 25576, "wasting": 25577, "asco": 25578, "aste": 25579, "tista": 25580, "remedies": 25581, "ungsoo": 25582, "southend": 25583, "permanently": 25584, "bumble": 25585, "procrastin": 25586, "identical": 25587, "practically": 25588, "mascul": 25589, "suke": 25590, "assured": 25591, "valerie": 25592, "deviant": 25593, "grizzlies": 25594, "thier": 25595, "pura": 25596, "nepal": 25597, "notts": 25598, "bilateral": 25599, "spoil": 25600, "carmel": 25601, "cinematic": 25602, "phl": 25603, "nifty": 25604, "mao": 25605, "hypocri": 25606, "laser": 25607, "pantry": 25608, "mathematical": 25609, "elisa": 25610, "coordination": 25611, "belmont": 25612, "ait": 25613, "radiant": 25614, "boiler": 25615, "mang": 25616, "fag": 25617, "crc": 25618, "hams": 25619, "brin": 25620, "â¬ĩï¸ı": 25621, "familia": 25622, "âĿ£": 25623, "saber": 25624, "rupert": 25625, "ggan": 25626, "ritz": 25627, "mich": 25628, "salford": 25629, "levi": 25630, "gral": 25631, "ðŁē¤": 25632, "nino": 25633, "ced": 25634, "businessman": 25635, "ultr": 25636, "simply": 25637, "compression": 25638, "pains": 25639, "halt": 25640, "ë°©íĄĦ": 25641, "landscaping": 25642, "nf": 25643, "crooked": 25644, "erd": 25645, "ittin": 25646, "ddleston": 25647, "surpassed": 25648, "inoa": 25649, "dag": 25650, "blen": 25651, "extending": 25652, "ating": 25653, "algae": 25654, "baller": 25655, "umar": 25656, "snooker": 25657, "collu": 25658, "flown": 25659, "thub": 25660, "ridiculously": 25661, "kish": 25662, "ople": 25663, "dire": 25664, "asser": 25665, "aristo": 25666, "sciss": 25667, "hating": 25668, "trouble": 25669, "sylvia": 25670, "succul": 25671, "plots": 25672, "sincerely": 25673, "aler": 25674, "laureate": 25675, "brack": 25676, "attn": 25677, "rifles": 25678, "meto": 25679, "collectible": 25680, "cuomo": 25681, "contestant": 25682, "consistency": 25683, "antz": 25684, "ranges": 25685, "abigail": 25686, "deb": 25687, "minister": 25688, "growers": 25689, "anoo": 25690, "hoover": 25691, "dreamer": 25692, "nucle": 
25693, "research": 25694, "miy": 25695, "shahid": 25696, "mav": 25697, "dhoni": 25698, "cini": 25699, "doj": 25700, "hindus": 25701, "partying": 25702, "dali": 25703, "alonso": 25704, "informal": 25705, "clarkson": 25706, "itton": 25707, "kian": 25708, "cityo": 25709, "mori": 25710, "lasted": 25711, "aspen": 25712, "library": 25713, "suspici": 25714, "quat": 25715, "denial": 25716, "folder": 25717, "chori": 25718, "sweeping": 25719, "enix": 25720, "ðŁįĤ": 25721, "ØŃ": 25722, "nascar": 25723, "handmadehour": 25724, "moul": 25725, "heatwave": 25726, "emer": 25727, "examine": 25728, "ibn": 25729, "grind": 25730, "pov": 25731, "tionist": 25732, "mbo": 25733, "sheila": 25734, "integrate": 25735, "omes": 25736, "takeaway": 25737, "cerv": 25738, "connie": 25739, "ticket": 25740, "celed": 25741, "bien": 25742, "visually": 25743, "madagascar": 25744, "sorry": 25745, "gui": 25746, "parkrun": 25747, "traits": 25748, "labe": 25749, "poisoning": 25750, "à„Ģ": 25751, "viable": 25752, "bohemian": 25753, "dentistry": 25754, "bados": 25755, "sprouts": 25756, "masked": 25757, "teddy": 25758, "ðŁĺ·": 25759, "saf": 25760, "saas": 25761, "jiang": 25762, "tight": 25763, "speaker": 25764, "withdrawal": 25765, "bcn": 25766, "assigned": 25767, "classrooms": 25768, "fleming": 25769, "ðŁē«": 25770, "supergirl": 25771, "totals": 25772, "tabletop": 25773, "ebooks": 25774, "horizontal": 25775, "craz": 25776, "flush": 25777, "jard": 25778, "cdc": 25779, "erson": 25780, "ãħł": 25781, "greenwood": 25782, "nih": 25783, "cox": 25784, "ada": 25785, "litre": 25786, "going": 25787, "vicky": 25788, "curved": 25789, "louie": 25790, "grains": 25791, "hye": 25792, "longe": 25793, "remedy": 25794, "trainee": 25795, "sanjay": 25796, "superstars": 25797, "maser": 25798, "manu": 25799, "sage": 25800, "whl": 25801, "ðŁĺĤðŁĺŃ": 25802, "ðŁijįðŁı»": 25803, "msd": 25804, "enz": 25805, "rabhu": 25806, "joo": 25807, "ghu": 25808, "acer": 25809, "epo": 25810, "resurrection": 25811, "justicefor": 25812, "blended": 
25813, "moda": 25814, "avalanche": 25815, "francesco": 25816, "respective": 25817, "gs": 25818, "yeast": 25819, "welch": 25820, "devotion": 25821, "getin": 25822, "atheism": 25823, "amic": 25824, "carolyn": 25825, "loc": 25826, "ldnont": 25827, "avec": 25828, "usda": 25829, "legged": 25830, "bravery": 25831, "blower": 25832, "cowboy": 25833, "heh": 25834, "stible": 25835, "buffal": 25836, "channel": 25837, "runchat": 25838, "âĺķï¸ı": 25839, "ideology": 25840, "bestseller": 25841, "yoo": 25842, "peanu": 25843, "bonne": 25844, "felic": 25845, "edison": 25846, "fractu": 25847, "narendra": 25848, "ppets": 25849, "seymour": 25850, "riviera": 25851, "hector": 25852, "necessarily": 25853, "bianca": 25854, "societies": 25855, "thebest": 25856, "wg": 25857, "sentences": 25858, "wink": 25859, "vaccines": 25860, "palooza": 25861, "jamming": 25862, "asf": 25863, "mpus": 25864, "agreements": 25865, "eck": 25866, "bac": 25867, "honore": 25868, "compul": 25869, "wildcat": 25870, "imposed": 25871, "yoga": 25872, "hudson": 25873, "canceled": 25874, "lich": 25875, "fuzzy": 25876, "esque": 25877, "chuk": 25878, "wvu": 25879, "sek": 25880, "flipping": 25881, "rhon": 25882, "wished": 25883, "wha": 25884, "capability": 25885, "lenovo": 25886, "ìĨĮëħĦëĭ": 25887, "vivo": 25888, "tvd": 25889, "nora": 25890, "silk": 25891, "pasadena": 25892, "yosemite": 25893, "valuation": 25894, "clocks": 25895, "uber": 25896, "mrc": 25897, "darkest": 25898, "aubre": 25899, "sso": 25900, "belly": 25901, "wrestlers": 25902, "killin": 25903, "louder": 25904, "buckley": 25905, "geel": 25906, "adon": 25907, "uns": 25908, "appealing": 25909, "ðŁij¯": 25910, "semitism": 25911, "listens": 25912, "fitz": 25913, "ãĄ³ãĄ": 25914, "nylon": 25915, "arty": 25916, "seemingly": 25917, "hala": 25918, "suited": 25919, "ety": 25920, "sheds": 25921, "muffins": 25922, "apric": 25923, "uments": 25924, "uta": 25925, "jammu": 25926, "chelseafc": 25927, "starz": 25928, "yoko": 25929, "root": 25930, "cleansing": 25931, "diar": 
25932, "pioneering": 25933, "iheartradio": 25934, "digiti": 25935, "findyour": 25936, "cano": 25937, "ðŁēİ": 25938, "zol": 25939, "spacecraft": 25940, "sixers": 25941, "moisturi": 25942, "bile": 25943, "tists": 25944, "horton": 25945, "ranging": 25946, "columbi": 25947, "meteoro": 25948, "sentiment": 25949, "epl": 25950, "footh": 25951, "textbook": 25952, "drainage": 25953, "rly": 25954, "scue": 25955, "imrankhan": 25956, "ðŁē¸": 25957, "margarita": 25958, "eddy": 25959, "predicts": 25960, "gamergate": 25961, "advise": 25962, "growthhacking": 25963, "loveyou": 25964, "ugand": 25965, "vf": 25966, "benghazi": 25967, "slater": 25968, "newor": 25969, "chel": 25970, "independenceday": 25971, "pnp": 25972, "cullen": 25973, "hoodies": 25974, "numbered": 25975, "britt": 25976, "tsa": 25977, "kltu": 25978, "sages": 25979, "momo": 25980, "oneplus": 25981, "coll": 25982, "guts": 25983, "wta": 25984, "mesmeri": 25985, "enhancing": 25986, "chiroprac": 25987, "jis": 25988, "teenagers": 25989, "mone": 25990, "constellation": 25991, "sweepstakes": 25992, "eze": 25993, "slovakia": 25994, "laye": 25995, "pearce": 25996, "waver": 25997, "pogba": 25998, "kron": 25999, "surgeons": 26000, "marx": 26001, "tid": 26002, "gga": 26003, "descend": 26004, "pours": 26005, "uprising": 26006, "walla": 26007, "sabbath": 26008, "bachelore": 26009, "mackin": 26010, "kam": 26011, "peterborough": 26012, "hora": 26013, "ðŁĮŁðŁĮŁ": 26014, "thinkbig": 26015, "rj": 26016, "hydrau": 26017, "spal": 26018, "universit": 26019, "ðŁıī": 26020, "mailonline": 26021, "leagueof": 26022, "tenants": 26023, "wally": 26024, "lance": 26025, "heavens": 26026, "ddr": 26027, "bolts": 26028, "amir": 26029, "iphone": 26030, "cigar": 26031, "endu": 26032, "rei": 26033, "elabor": 26034, "ringing": 26035, "johnson": 26036, "characteristics": 26037, "saloon": 26038, "algorithms": 26039, "talkin": 26040, "mtn": 26041, "dive": 26042, "regionals": 26043, "ffice": 26044, "hati": 26045, "deviantart": 26046, "sotto": 26047, "shiro": 
26048, "lama": 26049, "kwe": 26050, "faded": 26051, "porting": 26052, "tummy": 26053, "estates": 26054, "buenos": 26055, "ð٦ģ": 26056, "believer": 26057, "penetr": 26058, "darn": 26059, "spite": 26060, "canopy": 26061, "fashioni": 26062, "tilla": 26063, "petals": 26064, "elijah": 26065, "brawl": 26066, "martyr": 26067, "ë°©íĄĦìĨĮëħĦëĭ": 26068, "midtown": 26069, "erich": 26070, "dapper": 26071, "smtown": 26072, "megam": 26073, "www": 26074, "lele": 26075, "ons": 26076, "catfish": 26077, "firth": 26078, "fossilfriday": 26079, "ballpark": 26080, "thaw": 26081, "potent": 26082, "illie": 26083, "creep": 26084, "carp": 26085, "soap": 26086, "gundam": 26087, "infec": 26088, "yyyyy": 26089, "न": 26090, "zag": 26091, "ritt": 26092, "calculator": 26093, "boca": 26094, "oko": 26095, "toad": 26096, "threaten": 26097, "refined": 26098, "olympic": 26099, "accomplishment": 26100, "bacterial": 26101, "aji": 26102, "tatum": 26103, "feliz": 26104, "sheed": 26105, "jat": 26106, "thic": 26107, "jamal": 26108, "ðĿĺ": 26109, "lina": 26110, "ðŁIJ¯": 26111, "joking": 26112, "yotpo": 26113, "pinch": 26114, "akron": 26115, "herb": 26116, "motivation": 26117, "lia": 26118, "hostage": 26119, "creek": 26120, "gamble": 26121, "russell": 26122, "patti": 26123, "fotos": 26124, "cpc": 26125, "broken": 26126, "backthe": 26127, "clays": 26128, "umm": 26129, "stockton": 26130, "maternal": 26131, "ür": 26132, "lakel": 26133, "century": 26134, "bek": 26135, "infected": 26136, "Ć ĀøĀ”": 26137, "smackdown": 26138, "manned": 26139, "tahoe": 26140, "smes": 26141, "basa": 26142, "sula": 26143, "augusta": 26144, ".*": 26145, "rohingya": 26146, "greed": 26147, "counselor": 26148, "silhouette": 26149, "gravit": 26150, "clause": 26151, "'-": 26152, "bobc": 26153, "occasions": 26154, "nowadays": 26155, "dictat": 26156, "beard": 26157, "nally": 26158, "brightest": 26159, "kabul": 26160, "incindia": 26161, "dhanush": 26162, "archaeological": 26163, "cheape": 26164, "mizzou": 26165, "dhi": 26166, "ovski": 26167, 
"baxter": 26168, "assemble": 26169, "â": 26170, "gigi": 26171, "acam": 26172, "wisely": 26173, "hazard": 26174, "northampton": 26175, "âľĪï¸ı": 26176, "meth": 26177, "blasting": 26178, "reunite": 26179, "mulus": 26180, "alizes": 26181, "tread": 26182, "mila": 26183, "edward": 26184, "kova": 26185, "pesto": 26186, "ðŁij¶": 26187, "vitz": 26188, "hydraulic": 26189, "refurbished": 26190, "motel": 26191, "isabella": 26192, "homme": 26193, "severance": 26194, "uphol": 26195, "miserable": 26196, "fari": 26197, "latter": 26198, "efer": 26199, "crackers": 26200, "esl": 26201, "acio": 26202, "yyj": 26203, "inan": 26204, "ecb": 26205, "zind": 26206, "panas": 26207, "trucking": 26208, "reed": 26209, "shaker": 26210, "burgess": 26211, "empire": 26212, "agnes": 26213, "nington": 26214, "artworks": 26215, "frs": 26216, "tile": 26217, "biome": 26218, "eun": 26219, "chong": 26220, "americana": 26221, "godfather": 26222, "goblin": 26223, "ishi": 26224, "!).": 26225, "tempted": 26226, "genomics": 26227, "mandate": 26228, "cky": 26229, "ðŁēĻðŁēĽ": 26230, "somali": 26231, "brandy": 26232, "inven": 26233, "spokesperson": 26234, "pcb": 26235, "yuan": 26236, "hg": 26237, "faz": 26238, "starwars": 26239, "rowan": 26240, "bluegrass": 26241, "dong": 26242, "dday": 26243, "trinidad": 26244, "erton": 26245, "banning": 26246, "retention": 26247, "cured": 26248, "toberfest": 26249, "reset": 26250, "weis": 26251, "detached": 26252, "behindthescenes": 26253, "immunity": 26254, "pha": 26255, "bray": 26256, "ðŁij½": 26257, "rancho": 26258, "ramsay": 26259, "estonia": 26260, "ndtv": 26261, "].": 26262, "cabaret": 26263, "taro": 26264, "dv": 26265, "showcases": 26266, "plum": 26267, "ðŁij¸": 26268, "sonoma": 26269, "prepa": 26270, "memorab": 26271, "estu": 26272, "driveway": 26273, "ules": 26274, "magnus": 26275, "xr": 26276, "nnn": 26277, "muchas": 26278, "enge": 26279, "streamed": 26280, "forestry": 26281, "audiobook": 26282, "troy": 26283, "reckless": 26284, "kilom": 26285, "ruler": 26286, "rak": 
26287, "procession": 26288, "ions": 26289, "poole": 26290, "noctur": 26291, "whs": 26292, "farmhouse": 26293, "pera": 26294, "parme": 26295, "hypocrisy": 26296, "sics": 26297, "vant": 26298, "cask": 26299, "holistic": 26300, "aust": 26301, "п": 26302, "indo": 26303, "ðŁij©âĢį": 26304, "diso": 26305, "dispatch": 26306, "olsen": 26307, "makeit": 26308, "ennis": 26309, "centre": 26310, "arrange": 26311, "ðŁĮ¼": 26312, "salted": 26313, "easiest": 26314, "fate": 26315, "regatta": 26316, "mozz": 26317, "acan": 26318, "sini": 26319, "gically": 26320, "chops": 26321, "chicken": 26322, "workin": 26323, "hagg": 26324, "involve": 26325, "weeds": 26326, "bookday": 26327, "wakeup": 26328, "kyr": 26329, "michelin": 26330, "fuss": 26331, "rejuven": 26332, "vacancies": 26333, "incarcer": 26334, "mst": 26335, "scents": 26336, "sovereign": 26337, "kicker": 26338, "à§": 26339, "bod": 26340, "âĢĶ>": 26341, "sah": 26342, "mobil": 26343, "shropshire": 26344, "ophone": 26345, "dresser": 26346, "missuni": 26347, "hepburn": 26348, "imo": 26349, "foliage": 26350, "diagnostic": 26351, "assan": 26352, "cycling": 26353, "guilt": 26354, "csa": 26355, "puertorico": 26356, "winelover": 26357, "wakefield": 26358, "doggy": 26359, "khe": 26360, "papp": 26361, "cog": 26362, "allot": 26363, "cuck": 26364, "poetic": 26365, "mio": 26366, "revit": 26367, "magician": 26368, "ç„": 26369, "antenna": 26370, "westwood": 26371, "mberg": 26372, "luxe": 26373, "oatmeal": 26374, "ج": 26375, "teat": 26376, "ffee": 26377, "searches": 26378, "lly": 26379, "pluto": 26380, "elon": 26381, "lettering": 26382, "innocence": 26383, "fai": 26384, "annon": 26385, "telangana": 26386, "mait": 26387, "neural": 26388, "canni": 26389, "aroma": 26390, "astor": 26391, "fex": 26392, "cocac": 26393, "monetary": 26394, "fent": 26395, "unsure": 26396, "'@": 26397, "indirec": 26398, "tehran": 26399, "isolation": 26400, "libs": 26401, "makeup": 26402, "mercedes": 26403, "ffy": 26404, "hetero": 26405, "deo": 26406, "scom": 26407, 
"cursed": 26408, "veteransday": 26409, "frankenstein": 26410, "shrews": 26411, "deco": 26412, "geese": 26413, "leftover": 26414, "hadid": 26415, "variable": 26416, "academics": 26417, "carolin": 26418, "undergoing": 26419, "variation": 26420, "nah": 26421, "ssier": 26422, "gamersunite": 26423, "pursuing": 26424, "emerged": 26425, "llers": 26426, "controlling": 26427, "roaring": 26428, "meteor": 26429, "volt": 26430, "dawgs": 26431, "beaver": 26432, "islife": 26433, "bathrooms": 26434, "acional": 26435, "prevent": 26436, "lakedistrict": 26437, "inals": 26438, "yani": 26439, "grabbing": 26440, "sacks": 26441, "lez": 26442, "sway": 26443, "kool": 26444, "times": 26445, "klopp": 26446, "lade": 26447, "concord": 26448, "resulted": 26449, "revive": 26450, "reconciliation": 26451, "oland": 26452, "azz": 26453, "giro": 26454, "mandarin": 26455, "deen": 26456, "nutritional": 26457, "iscoming": 26458, "vani": 26459, "awwww": 26460, "derived": 26461, "loveyour": 26462, "stopthe": 26463, "shouting": 26464, "novak": 26465, "ðŁĻĮðŁı¾": 26466, "loaf": 26467, "displaying": 26468, "sundaywith": 26469, "maguire": 26470, "cheri": 26471, "ðŁıŁ": 26472, "rematch": 26473, "quic": 26474, "Ú©": 26475, "yin": 26476, "ðŁĺ¹": 26477, "ilive": 26478, "zip": 26479, "ourke": 26480, "downloads": 26481, "swat": 26482, "mississ": 26483, "carers": 26484, "tment": 26485, "property": 26486, "hahahahahaha": 26487, "gibbs": 26488, "surrey": 26489, "arise": 26490, "ticism": 26491, "stia": 26492, "irling": 26493, "frog": 26494, "cose": 26495, "bassist": 26496, "foreig": 26497, "leau": 26498, "pillows": 26499, "holla": 26500, "elie": 26501, "disclosure": 26502, "peanuts": 26503, "intech": 26504, "wwc": 26505, "plunge": 26506, "triumph": 26507, "cori": 26508, "slippers": 26509, "ðŁĻıðŁĻı": 26510, "neutrality": 26511, "mare": 26512, "hairy": 26513, "gangster": 26514, "humming": 26515, "custard": 26516, "merlin": 26517, "alea": 26518, "sby": 26519, "damp": 26520, "mohan": 26521, "verbal": 26522, "jst": 26523, 
"gutted": 26524, "bjor": 26525, "unfinished": 26526, "ðŁĩ¯ðŁĩµ": 26527, "unhappy": 26528, "âļ«ï¸ı": 26529, "bypass": 26530, "atsu": 26531, "fischer": 26532, "sav": 26533, "africans": 26534, "reuse": 26535, "midway": 26536, "demolished": 26537, "gerrard": 26538, "hercules": 26539, "ÄŁ": 26540, "medicines": 26541, "clicking": 26542, "surround": 26543, "joong": 26544, "waving": 26545, "tribes": 26546, "wetlands": 26547, "officiel": 26548, "arguing": 26549, "lle": 26550, "dova": 26551, "suzy": 26552, "clubhouse": 26553, "negro": 26554, "obtain": 26555, "gao": 26556, "glance": 26557, "assist": 26558, "chos": 26559, "ãĤ¢": 26560, "âĺķ": 26561, "adrid": 26562, "occurs": 26563, "stans": 26564, "pardon": 26565, "liveli": 26566, "employed": 26567, "revisit": 26568, "ffxiv": 26569, "bble": 26570, "nearing": 26571, "miner": 26572, "ðŁĺ¹": 26573, "giovanni": 26574, "upto": 26575, "marvell": 26576, "marse": 26577, "towels": 26578, "cbn": 26579, "engineered": 26580, "yelling": 26581, "spartan": 26582, "sians": 26583, "ðŁĻĮðŁı¼": 26584, "sev": 26585, "coyote": 26586, "stadi": 26587, "tcm": 26588, "appen": 26589, "shenanigans": 26590, "openaccess": 26591, "soaked": 26592, "masqu": 26593, "levine": 26594, "strokes": 26595, "lk": 26596, "apartheid": 26597, "hiphop": 26598, "chardon": 26599, "maymay": 26600, "haasan": 26601, "stripped": 26602, "fro": 26603, "scription": 26604, "fton": 26605, "hf": 26606, "prisons": 26607, "marshal": 26608, "ķãĤ": 26609, "ancho": 26610, "compromise": 26611, "classification": 26612, "buzzfeed": 26613, "bbloggers": 26614, "deserving": 26615, ")/": 26616, "sway": 26617, "obo": 26618, "campers": 26619, "podernfamily": 26620, "poured": 26621, "brie": 26622, "squirrels": 26623, "seize": 26624, ":#": 26625, "lek": 26626, "timb": 26627, "stacy": 26628, "nasdaq": 26629, "repeatedly": 26630, "brat": 26631, "mighty": 26632, "competitor": 26633, "mahone": 26634, "desi": 26635, "oke": 26636, "bmw": 26637, "shie": 26638, "fcb": 26639, "cheapest": 26640, 
"minimalist": 26641, "paramount": 26642, "nate": 26643, "haras": 26644, "insanity": 26645, "lateral": 26646, "mentality": 26647, "mozam": 26648, "tapped": 26649, "yadav": 26650, "usp": 26651, "bway": 26652, "theod": 26653, "bilt": 26654, "raids": 26655, "empress": 26656, "adapted": 26657, "patron": 26658, "nutshell": 26659, "agra": 26660, "beaded": 26661, "sundaywithmarsha": 26662, "viking": 26663, "proceed": 26664, "maintained": 26665, "thinkbigsundaywithmarsha": 26666, "snes": 26667, "musica": 26668, "tower": 26669, "chab": 26670, "bok": 26671, "smt": 26672, "insult": 26673, "harvesting": 26674, "window": 26675, "ruther": 26676, "beige": 26677, "decal": 26678, "indicate": 26679, "mailing": 26680, "rift": 26681, "pole": 26682, "anderson": 26683, "choral": 26684, "spride": 26685, "lili": 26686, "evelyn": 26687, "imrankhanpti": 26688, "....\"": 26689, "kered": 26690, "undp": 26691, "waterfalls": 26692, "sears": 26693, "lemans": 26694, "worldseries": 26695, "riel": 26696, "anie": 26697, "appar": 26698, "scorers": 26699, "lamp": 26700, "athan": 26701, "physicians": 26702, "quinoa": 26703, "refusing": 26704, "vuitton": 26705, "unleash": 26706, "sla": 26707, "pati": 26708, "shouts": 26709, "intentions": 26710, "foamed": 26711, "european": 26712, "neighborhoods": 26713, "meer": 26714, "manson": 26715, "duh": 26716, "brat": 26717, "cones": 26718, "bowl": 26719, "kazakhstan": 26720, "ि": 26721, "inappropriate": 26722, "delhi": 26723, "ketchup": 26724, "fulton": 26725, "sys": 26726, "consult": 26727, "garfield": 26728, "togo": 26729, "fml": 26730, "fled": 26731, "bds": 26732, "facilitate": 26733, "reebok": 26734, "selfie": 26735, "elevate": 26736, "activate": 26737, "bible": 26738, "cawx": 26739, "bys": 26740, "camille": 26741, "syou": 26742, "skool": 26743, "hert": 26744, "wbc": 26745, "pledges": 26746, "recorder": 26747, "posh": 26748, "acre": 26749, "soaking": 26750, "matil": 26751, "vsco": 26752, "shootings": 26753, "plar": 26754, "econ": 26755, "ðŁĻĮðŁı»": 26756, 
"rashid": 26757, "ubi": 26758, "ðŁ¤¤": 26759, "swinging": 26760, "wipe": 26761, "raptor": 26762, "msu": 26763, "musicvideo": 26764, "durham": 26765, "attic": 26766, "aparty": 26767, "fetus": 26768, "activation": 26769, "aaz": 26770, "motivate": 26771, "ðŁēķðŁēķðŁēķ": 26772, "jal": 26773, "म": 26774, "agon": 26775, "scheer": 26776, "stalker": 26777, "foster": 26778, "azzo": 26779, "telegram": 26780, "vigor": 26781, "slaugh": 26782, "screenshots": 26783, "entrepreneu": 26784, "kristin": 26785, "intention": 26786, "chilli": 26787, "fraction": 26788, "dona": 26789, "gea": 26790, "tcu": 26791, "site": 26792, "lak": 26793, "emil": 26794, "dnt": 26795, "boro": 26796, "wilkinson": 26797, "recu": 26798, "atoday": 26799, "tanya": 26800, "blanco": 26801, "cdn": 26802, "brilliantly": 26803, "gcc": 26804, "acc": 26805, "evacuated": 26806, "therine": 26807, "denny": 26808, "caitlin": 26809, "shepard": 26810, "pouch": 26811, "handheld": 26812, "southeastern": 26813, "haa": 26814, "Ó": 26815, "resolutions": 26816, "ledger": 26817, "srin": 26818, "rar": 26819, "shattered": 26820, "chimney": 26821, "imwith": 26822, "meteor": 26823, "handled": 26824, "rake": 26825, "townsend": 26826, "enhan": 26827, "shipy": 26828, "duct": 26829, "twx": 26830, "inflammatory": 26831, "warhammer": 26832, "theatrical": 26833, "gros": 26834, "skar": 26835, "scotty": 26836, "niel": 26837, "tito": 26838, "tini": 26839, "connection": 26840, "_.": 26841, "goldenglobes": 26842, "shaq": 26843, "ðŁı³ï¸ı": 26844, "hallway": 26845, "fronts": 26846, "effectiveness": 26847, "glaston": 26848, "dhs": 26849, "expi": 26850, "toh": 26851, "cpl": 26852, "scs": 26853, "reo": 26854, "hag": 26855, "resemblance": 26856, "horan": 26857, "abusive": 26858, "quer": 26859, "virtue": 26860, "cholester": 26861, "aq": 26862, "shane": 26863, "mce": 26864, "carriers": 26865, "distress": 26866, "rewind": 26867, "”": 26868, "voodoo": 26869, "intact": 26870, "anno": 26871, "ðŁĺ¤": 26872, "piled": 26873, "adia": 26874, "ãĄ³": 26875, 
"enow": 26876, "digs": 26877, "lightly": 26878, "goofy": 26879, "turbine": 26880, "governors": 26881, "conte": 26882, "reopen": 26883, "pah": 26884, "ive": 26885, "crafting": 26886, "sweeps": 26887, "jodi": 26888, "ande": 26889, "zucker": 26890, "kawaii": 26891, "oko": 26892, "vai": 26893, "outline": 26894, "kristi": 26895, "tsn": 26896, "inspo": 26897, "quint": 26898, "filthy": 26899, "lynne": 26900, "listeners": 26901, "departing": 26902, "ord": 26903, "tweed": 26904, ",&": 26905, "alek": 26906, "selfish": 26907, "norther": 26908, "recognizes": 26909, "ips": 26910, "bes": 26911, "aed": 26912, "wills": 26913, "peat": 26914, "surroundings": 26915, "monuments": 26916, "aisle": 26917, "becker": 26918, "lav": 26919, "quantity": 26920, "vah": 26921, "helicopters": 26922, "tucked": 26923, "alvarez": 26924, "shape": 26925, "obey": 26926, "additi": 26927, "roadside": 26928, "mite": 26929, "blers": 26930, "epage": 26931, "jau": 26932, "ignorant": 26933, "bins": 26934, "lulu": 26935, "xo": 26936, "cfo": 26937, "eeeee": 26938, "apprenticeship": 26939, "sheffiel": 26940, "toi": 26941, "hok": 26942, "fakenews": 26943, "deploy": 26944, "aidan": 26945, "huskers": 26946, "ãĢİ": 26947, "westbrook": 26948, "mister": 26949, "configur": 26950, "carr": 26951, "fica": 26952, "proceedings": 26953, "haw": 26954, "steak": 26955, "murderer": 26956, "payday": 26957, "ajo": 26958, "pvc": 26959, "donates": 26960, "biaf": 26961, "nomnom": 26962, "beit": 26963, "kali": 26964, "xrp": 26965, "ahmedabad": 26966, "semic": 26967, "chey": 26968, "xtra": 26969, "antwer": 26970, "headlining": 26971, "squares": 26972, "rounded": 26973, "fluore": 26974, "bold": 26975, "disasters": 26976, "amoo": 26977, "generic": 26978, "cranes": 26979, "briefly": 26980, "gig": 26981, "austerity": 26982, "anticipation": 26983, "forti": 26984, "treasurer": 26985, "canny": 26986, "cecil": 26987, "detected": 26988, "checklist": 26989, "ว": 26990, "pamela": 26991, "barbados": 26992, "anfield": 26993, "hearty": 26994, 
"txlege": 26995, "perenni": 26996, "arrog": 26997, "ingram": 26998, "âĹı": 26999, "tyne": 27000, "spoon": 27001, "ration": 27002, "amba": 27003, "mbe": 27004, "camel": 27005, "hhs": 27006, "yorkshire": 27007, "reflective": 27008, "freaks": 27009, "tok": 27010, "judo": 27011, "particles": 27012, "dubs": 27013, "banjo": 27014, "accreditation": 27015, "proverbs": 27016, "overdose": 27017, "integral": 27018, "guang": 27019, "mcs": 27020, "supercar": 27021, "afb": 27022, "alvin": 27023, "ails": 27024, "xtre": 27025, "staging": 27026, "twent": 27027, "rabbits": 27028, "maro": 27029, "instem": 27030, "doll": 27031, "cray": 27032, "santana": 27033, "bleach": 27034, "minions": 27035, "cheap": 27036, "mant": 27037, "divers": 27038, "catalonia": 27039, "lois": 27040, "matri": 27041, "cougar": 27042, "kayak": 27043, "egre": 27044, "pso": 27045, "aia": 27046, "Ä®": 27047, "charlton": 27048, "tracked": 27049, "scari": 27050, "pett": 27051, "fwd": 27052, "xin": 27053, "gravel": 27054, "bric": 27055, "biggboss": 27056, "arden": 27057, "hugging": 27058, "palms": 27059, "stv": 27060, "limb": 27061, "themovie": 27062, "handicap": 27063, "rime": 27064, "zai": 27065, "stub": 27066, "india": 27067, "lithuania": 27068, "rhyth": 27069, "pita": 27070, "macedonia": 27071, "highered": 27072, "bridget": 27073, "schwarz": 27074, "skelet": 27075, "hikes": 27076, "antarctic": 27077, "cps": 27078, "mashup": 27079, "а": 27080, "nell": 27081, "chandra": 27082, "heir": 27083, "anus": 27084, "sheridan": 27085, "mimi": 27086, "museu": 27087, "becca": 27088, "anir": 27089, "barrie": 27090, "diocese": 27091, "comparable": 27092, "ðŁı³ï¸ıâĢį": 27093, "yukon": 27094, "mep": 27095, "hormon": 27096, "meric": 27097, "alf": 27098, "conquered": 27099, "christchurch": 27100, "ðŁēĻðŁēĻ": 27101, "hazardous": 27102, "pooh": 27103, "conting": 27104, "retrospective": 27105, "parame": 27106, "nair": 27107, "consor": 27108, "hotra": 27109, "astonishing": 27110, "caterpillar": 27111, "uman": 27112, "tism": 27113, 
"tvs": 27114, "servic": 27115, "croydon": 27116, "morales": 27117, "cg": 27118, "cum": 27119, "teur": 27120, "scanada": 27121, "sall": 27122, "magnolia": 27123, "elise": 27124, "thour": 27125, "ி": 27126, "agomez": 27127, "phelps": 27128, "ë°©íĄĦìĨĮëħĦëĭ¨": 27129, "whos": 27130, "weaving": 27131, "sisd": 27132, "proposes": 27133, "crows": 27134, "presale": 27135, "economies": 27136, "bernardo": 27137, "shahid": 27138, "airshow": 27139, "mccann": 27140, "horticul": 27141, "nrl": 27142, "duel": 27143, "mongolia": 27144, "toulou": 27145, "requirement": 27146, "structured": 27147, "edi": 27148, "olives": 27149, "hea": 27150, "cuter": 27151, "к": 27152, "enthusiast": 27153, "harriet": 27154, "dominion": 27155, "submer": 27156, "ðŁįĄ": 27157, "saab": 27158, "nesburg": 27159, "moff": 27160, "defended": 27161, "burt": 27162, "rewarded": 27163, "goldman": 27164, "optics": 27165, "khalid": 27166, "households": 27167, "buckets": 27168, "cecil": 27169, "chess": 27170, "substantial": 27171, "efl": 27172, "operation": 27173, "evaluate": 27174, "stn": 27175, "recession": 27176, "lll": 27177, "tomas": 27178, "truths": 27179, "akbar": 27180, "swords": 27181, "pact": 27182, "embarrass": 27183, "hao": 27184, "ayurve": 27185, "scripture": 27186, "nycc": 27187, "opt": 27188, "diameter": 27189, "scented": 27190, "organizers": 27191, "relat": 27192, "hae": 27193, "dreamers": 27194, "dese": 27195, "ðŁĮ»": 27196, "restricted": 27197, "nale": 27198, "rhp": 27199, "dolan": 27200, "munster": 27201, "haired": 27202, "consultants": 27203, "joints": 27204, "humil": 27205, "dill": 27206, "relentless": 27207, "té": 27208, "afil": 27209, "utilities": 27210, "japanese": 27211, "condemn": 27212, "petite": 27213, "collide": 27214, "qf": 27215, "peaches": 27216, "courier": 27217, "lore": 27218, "âĺİï¸ı": 27219, "reliability": 27220, "chuk": 27221, "ðŁĻĄ": 27222, "stures": 27223, "gether": 27224, "hostel": 27225, "bier": 27226, "-_-": 27227, "âĩ": 27228, "eze": 27229, "tailo": 27230, "dient": 27231, 
"bluff": 27232, "chuffed": 27233, "pilip": 27234, "monarch": 27235, "eem": 27236, "buchan": 27237, "bick": 27238, "opau": 27239, "kups": 27240, "Ć ĀøĀ¢": 27241, "pistons": 27242, "spins": 27243, "mand": 27244, "cest": 27245, "burne": 27246, "vile": 27247, "cherries": 27248, "beckett": 27249, "needles": 27250, "panch": 27251, "ƫĤ": 27252, "hahah": 27253, "troubles": 27254, "insists": 27255, "doyou": 27256, "gmc": 27257, "mortar": 27258, "delegate": 27259, "inn": 27260, "ganda": 27261, "sinatra": 27262, "त": 27263, "speeding": 27264, "pupil": 27265, "premises": 27266, "alignment": 27267, "pikach": 27268, "asus": 27269, "jalan": 27270, "ص": 27271, "limestone": 27272, "folkl": 27273, "parmesan": 27274, "ceil": 27275, "moy": 27276, "shawnmendes": 27277, "acup": 27278, "hust": 27279, "otes": 27280, "medina": 27281, "madi": 27282, "gtav": 27283, "censorship": 27284, "arg": 27285, "sweeney": 27286, "sykes": 27287, "colo": 27288, "footsteps": 27289, "canned": 27290, "advance": 27291, "gtaonline": 27292, "healthyliving": 27293, "ðŁį¾": 27294, "aig": 27295, "pality": 27296, "ocs": 27297, "hebrew": 27298, "imminent": 27299, "berkshire": 27300, "jeremiah": 27301, "outgoing": 27302, "baker": 27303, "entrata": 27304, "maids": 27305, "groves": 27306, "boc": 27307, "adel": 27308, "mfw": 27309, "conscience": 27310, "armys": 27311, "nutella": 27312, "contestalert": 27313, "novelist": 27314, "lah": 27315, "banker": 27316, "marquez": 27317, "ðŁı”": 27318, "toff": 27319, "outage": 27320, "grp": 27321, "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 27322, "muscle": 27323, "dudley": 27324, "nvidia": 27325, "midi": 27326, "muni": 27327, "essays": 27328, "datac": 27329, "carter": 27330, "Ć ĀøĀ£": 27331, "tans": 27332, "ives": 27333, "publications": 27334, "aler": 27335, "okwx": 27336, "ilu": 27337, "cutt": 27338, "harp": 27339, "outlaw": 27340, "lutheran": 27341, "brill": 27342, "bolic": 27343, "dowell": 27344, "greenland": 27345, "besties": 27346, "pathi": 27347, "payton": 27348, "guest": 27349, "harden": 27350, 
"ðŁ¤©": 27351, "anned": 27352, "evacuation": 27353, "poised": 27354, "mcder": 27355, "bhan": 27356, "oi": 27357, "envelope": 27358, "cid": 27359, "cavi": 27360, "tapas": 27361, "bookreview": 27362, "greyhound": 27363, "âĻª": 27364, "feud": 27365, "lungs": 27366, "forte": 27367, "raider": 27368, "ffer": 27369, "onix": 27370, "depend": 27371, "ynwa": 27372, "relating": 27373, "devs": 27374, "ðŁēIJ": 27375, "acquires": 27376, "dha": 27377, "jyo": 27378, "privati": 27379, "canine": 27380, "kb": 27381, "crab": 27382, "sardin": 27383, "imagining": 27384, "kj": 27385, "empor": 27386, "downhill": 27387, "nez": 27388, "taeyeon": 27389, "nickimin": 27390, "gbp": 27391, "àµ": 27392, "wap": 27393, "secco": 27394, "mashed": 27395, "ðŁē„ðŁē„": 27396, "augustine": 27397, "dissol": 27398, "dictator": 27399, "âĵ": 27400, "viper": 27401, "edfringe": 27402, "vaux": 27403, "hardwork": 27404, "booklet": 27405, "nox": 27406, "chiff": 27407, "ðŁē¨": 27408, "observations": 27409, "xboxone": 27410, "usher": 27411, "keer": 27412, "lup": 27413, "dallas": 27414, "calgary": 27415, "madra": 27416, "dious": 27417, "kbs": 27418, "woodward": 27419, "heroine": 27420, "lumber": 27421, "seaworld": 27422, "ows": 27423, "mcke": 27424, "maverick": 27425, "gula": 27426, "crossroads": 27427, "fang": 27428, "sade": 27429, "nikol": 27430, "cheetah": 27431, "mec": 27432, "ppg": 27433, "erick": 27434, "ðŁİµ": 27435, "toxic": 27436, "bjj": 27437, "viola": 27438, "spire": 27439, "chino": 27440, "travis": 27441, "institutional": 27442, "haas": 27443, "lowry": 27444, "wac": 27445, "eae": 27446, "humid": 27447, "mpton": 27448, "ruck": 27449, "jew": 27450, "cine": 27451, "zimmer": 27452, "sef": 27453, "bharat": 27454, "frees": 27455, "aamir": 27456, "ðŁēħ": 27457, "zinc": 27458, "wane": 27459, "multiplayer": 27460, "royalwedding": 27461, "eel": 27462, "precipit": 27463, "query": 27464, "kimberly": 27465, "isabel": 27466, "fulfill": 27467, "igan": 27468, "vaul": 27469, "pane": 27470, "scy": 27471, "digit": 27472, 
"gunn": 27473, "utah": 27474, "dogday": 27475, "fion": 27476, "xiaomi": 27477, "dac": 27478, "elast": 27479, "chavez": 27480, "roblo": 27481, "gine": 27482, "tenth": 27483, "abh": 27484, "keto": 27485, "hurdle": 27486, "nadia": 27487, "memorabilia": 27488, "habs": 27489, "quan": 27490, "hw": 27491, "hvac": 27492, "pixar": 27493, "eccle": 27494, "kramer": 27495, "accuses": 27496, "ðŁēļðŁēļ": 27497, "perse": 27498, "meantime": 27499, "wahl": 27500, "atletico": 27501, "âĢ¢âĢ¢âĢ¢âĢ¢": 27502, "ottoman": 27503, "novo": 27504, "kus": 27505, "connected": 27506, "trusts": 27507, "dmv": 27508, "spencer": 27509, "rahulg": 27510, "dove": 27511, "stokes": 27512, "bologna": 27513, "enthusiasts": 27514, "ê": 27515, "rockstargames": 27516, "tedcruz": 27517, "duras": 27518, "sacked": 27519, "latex": 27520, "immersive": 27521, "cert": 27522, "lucin": 27523, "principals": 27524, "fares": 27525, "sails": 27526, "farn": 27527, "ament": 27528, "saffron": 27529, "quentin": 27530, "checkpoint": 27531, "ferris": 27532, "excur": 27533, "ðŁijīðŁı¼": 27534, "bailey": 27535, "seh": 27536, "terre": 27537, "madam": 27538, "sband": 27539, "wanderers": 27540, "cumberbatch": 27541, "yyc": 27542, "digitally": 27543, "blackandwhitephotography": 27544, "rollin": 27545, "moroccan": 27546, "ðŁĮħ": 27547, "dinner": 27548, "dwell": 27549, "toom": 27550, "mye": 27551, "ezra": 27552, "cpfc": 27553, "warhol": 27554, "meer": 27555, "jonah": 27556, "noaa": 27557, "sgate": 27558, "soon": 27559, "secular": 27560, "gating": 27561, "tio": 27562, "driver": 27563, "sissy": 27564, "assange": 27565, "tath": 27566, "edmund": 27567, "bobcats": 27568, "raji": 27569, "postage": 27570, "studs": 27571, "mgm": 27572, "kato": 27573, "edinburgh": 27574, "meetthe": 27575, "shirt": 27576, "faa": 27577, "mensfashion": 27578, "spreads": 27579, "wim": 27580, "carts": 27581, "phoebe": 27582, "jars": 27583, "botswana": 27584, "ƙĤ": 27585, "edwar": 27586, "skar": 27587, "rive": 27588, "gusty": 27589, "ctv": 27590, "ferdinand": 27591, 
"sutherland": 27592, "nickiminaj": 27593, "kv": 27594, "sius": 27595, "beech": 27596, "rez": 27597, "desires": 27598, "onial": 27599, "campo": 27600, "quarry": 27601, "lorraine": 27602, "gilmore": 27603, "iggy": 27604, "µï¸ı": 27605, "hopping": 27606, "aviz": 27607, "ðŁĮº": 27608, "unisex": 27609, "dedicate": 27610, "attitudes": 27611, "steer": 27612, "junkie": 27613, "railway": 27614, "yb": 27615, "whisper": 27616, "keyan": 27617, "kus": 27618, "jug": 27619, "dix": 27620, "ains": 27621, "summon": 27622, "ovich": 27623, "syed": 27624, "herald": 27625, "maison": 27626, "meded": 27627, "wildflower": 27628, "mainland": 27629, "risky": 27630, "rukh": 27631, "overlooked": 27632, "kic": 27633, "destroys": 27634, "naman": 27635, "kip": 27636, "zano": 27637, "championsleague": 27638, "bandit": 27639, "quincy": 27640, "smile": 27641, "calvin": 27642, "openings": 27643, "tapp": 27644, "olulu": 27645, "spectro": 27646, "accredited": 27647, "apk": 27648, "praised": 27649, "barnett": 27650, "pollen": 27651, "premiered": 27652, "selenagomez": 27653, "toured": 27654, "screenings": 27655, "uuu": 27656, "miso": 27657, "ense": 27658, "adamlambert": 27659, "guelph": 27660, "haryana": 27661, "hutto": 27662, "lear": 27663, "ltc": 27664, "poached": 27665, "brexit": 27666, "æĿ": 27667, "ttc": 27668, "pavement": 27669, "mongers": 27670, "roe": 27671, "aders": 27672, "lington": 27673, "participant": 27674, "cared": 27675, "gail": 27676, "yates": 27677, "lantic": 27678, "dashboard": 27679, "joo": 27680, "felipe": 27681, "ssionist": 27682, "bum": 27683, "send": 27684, "aeri": 27685, "thugs": 27686, "lucifer": 27687, "ahe": 27688, "detector": 27689, "filly": 27690, "gasoline": 27691, "hamper": 27692, "humpday": 27693, "theta": 27694, "theband": 27695, "forecasts": 27696, "ohhh": 27697, "lobb": 27698, "holl": 27699, "cpu": 27700, "azu": 27701, "adar": 27702, "hailey": 27703, "bub": 27704, "cart": 27705, "quoted": 27706, "anarchy": 27707, "pancre": 27708, "twitart": 27709, "alden": 27710, 
"stash": 27711, "theless": 27712, "orni": 27713, "beliebers": 27714, "mormon": 27715, "particle": 27716, "aviation": 27717, "â¬Ĩ": 27718, "webcamtoy": 27719, "saddened": 27720, "cruis": 27721, "hamlet": 27722, "nct": 27723, "rollins": 27724, "marquee": 27725, "sawyer": 27726, "reliance": 27727, "aura": 27728, "diec": 27729, "soothing": 27730, "signings": 27731, "akis": 27732, "ó": 27733, "atkins": 27734, "aerop": 27735, "ðŁĮ¿": 27736, "yab": 27737, "shari": 27738, "connol": 27739, "dubbed": 27740, "manufacture": 27741, "convincing": 27742, "feelthebern": 27743, "rau": 27744, "pulit": 27745, "onec": 27746, "gemstone": 27747, "urging": 27748, "bagu": 27749, "gah": 27750, "acids": 27751, "fianc": 27752, "zodiac": 27753, "snoop": 27754, "herrera": 27755, "initiated": 27756, "venge": 27757, "professors": 27758, "prodi": 27759, "stronger": 27760, "emission": 27761, "bba": 27762, "halle": 27763, "tapp": 27764, "hawan": 27765, "whim": 27766, "competed": 27767, "myrtle": 27768, "irport": 27769, "coldplay": 27770, "ache": 27771, "skep": 27772, "mson": 27773, "ssic": 27774, "calligraphy": 27775, "swimmers": 27776, "mey": 27777, "ppc": 27778, "thrift": 27779, "poc": 27780, "replaces": 27781, "commuter": 27782, "âģ¦âģ¦@": 27783, "goers": 27784, "logue": 27785, "paradig": 27786, "baskets": 27787, "sensitivity": 27788, "johan": 27789, "atlantis": 27790, "&&": 27791, "suitcase": 27792, "anxious": 27793, "lh": 27794, "stri": 27795, "galloway": 27796, "stread": 27797, "warden": 27798, "grounded": 27799, "fficiency": 27800, "lifeat": 27801, "relic": 27802, "disguise": 27803, "islanders": 27804, "fcofficial": 27805, "classicalmusic": 27806, "bmc": 27807, "enfield": 27808, "bique": 27809, "oakley": 27810, "batman": 27811, "slaying": 27812, "nerves": 27813, "multit": 27814, "calcium": 27815, "projector": 27816, "scottsdale": 27817, "antino": 27818, "grips": 27819, "kimmel": 27820, "desmond": 27821, "protestors": 27822, "hiatus": 27823, "metabolism": 27824, "concluded": 27825, "presser": 
27826, "tipping": 27827, "slide": 27828, "eto": 27829, "hunting": 27830, "ausopen": 27831, "rik": 27832, "ppery": 27833, "innovators": 27834, "pitchers": 27835, "agger": 27836, "fungi": 27837, "zad": 27838, "prolific": 27839, "rocknroll": 27840, "blames": 27841, "ctar": 27842, "stamford": 27843, "qad": 27844, "mozzarella": 27845, "insanely": 27846, "denver": 27847, "phouse": 27848, "nomad": 27849, "ĆÆĀæ": 27850, "sris": 27851, "produ": 27852, "henley": 27853, "pagan": 27854, "amtrak": 27855, "rubi": 27856, "incl": 27857, "tutor": 27858, "scotia": 27859, "woes": 27860, "singapo": 27861, "funnel": 27862, "turnbull": 27863, "knowledge": 27864, "grimm": 27865, "realmadrid": 27866, "weare": 27867, "missiles": 27868, "consol": 27869, "emojis": 27870, "sneak": 27871, "smiths": 27872, "ruiz": 27873, "brou": 27874, "iel": 27875, "haver": 27876, "ðŁĮļ": 27877, "kingof": 27878, "basilica": 27879, "circulation": 27880, "printers": 27881, "tapping": 27882, "ridley": 27883, "dragged": 27884, "haj": 27885, "writer": 27886, "fundamentals": 27887, "personalities": 27888, "metre": 27889, "stereotypes": 27890, "burle": 27891, "bestof": 27892, "nffc": 27893, "hath": 27894, "ministries": 27895, "aali": 27896, "tracing": 27897, "paved": 27898, "łï¸ı": 27899, "gic": 27900, "inspire": 27901, "tug": 27902, "hare": 27903, "repeated": 27904, "expon": 27905, "lolli": 27906, "rhode": 27907, "precin": 27908, "installations": 27909, "instagram": 27910, "azar": 27911, "ies": 27912, "solely": 27913, "dukes": 27914, "missionary": 27915, "vanguard": 27916, "fursuitfriday": 27917, "ond": 27918, "polari": 27919, "mast": 27920, "haran": 27921, "josé": 27922, "jacked": 27923, "ecoun": 27924, "alities": 27925, "neph": 27926, "ravel": 27927, "moderated": 27928, "scow": 27929, "sfb": 27930, "uruguay": 27931, "aso": 27932, "nig": 27933, "audu": 27934, "pints": 27935, "latina": 27936, "benz": 27937, "mitting": 27938, "charted": 27939, "matology": 27940, "citro": 27941, "biopic": 27942, "ðŁijŃ": 27943, 
"djokovic": 27944, "foxy": 27945, "aguil": 27946, "soto": 27947, "anada": 27948, "sinking": 27949, "scrap": 27950, "hairs": 27951, "bethany": 27952, "factfriday": 27953, "ðŁIJIJ": 27954, "unleashed": 27955, ")(": 27956, "contradic": 27957, "ramon": 27958, "coastline": 27959, "yong": 27960, "snsd": 27961, "ligan": 27962, "pome": 27963, "mitage": 27964, "gett": 27965, "wati": 27966, "risk": 27967, "soaring": 27968, "brush": 27969, "fpl": 27970, "avan": 27971, "ÄĨ": 27972, "larson": 27973, "shear": 27974, "multil": 27975, "blur": 27976, "multimedia": 27977, "chunky": 27978, "pari": 27979, "nani": 27980, "weird": 27981, "cholesterol": 27982, "charles": 27983, "dreamed": 27984, "tanning": 27985, "puzzles": 27986, "fram": 27987, "handball": 27988, "chag": 27989, "belize": 27990, "alu": 27991, "bangs": 27992, "ƑĦ": 27993, "detectives": 27994, "mcg": 27995, "ishq": 27996, "bothered": 27997, "safc": 27998, "mping": 27999, "teneri": 28000, "gays": 28001, "sailor": 28002, "angi": 28003, "multicul": 28004, "guessed": 28005, "rosé": 28006, "highways": 28007, "broom": 28008, "chattanoo": 28009, "-'": 28010, "seeker": 28011, "oned": 28012, "atf": 28013, "luc": 28014, "><": 28015, "bari": 28016, "percep": 28017, "jewelry": 28018, "asph": 28019, "sorrow": 28020, "sling": 28021, "mammoth": 28022, "jackie": 28023, "ë§": 28024, "wiltshire": 28025, "sao": 28026, "cancell": 28027, "impaired": 28028, "torial": 28029, "breed": 28030, "guyen": 28031, "judice": 28032, "title": 28033, "prospective": 28034, "applicants": 28035, "ðŁįĬ": 28036, "episcop": 28037, "eid": 28038, "byo": 28039, "stockings": 28040, "ðŁēĄðŁēĄ": 28041, "llp": 28042, "snag": 28043, "keepit": 28044, "lough": 28045, "olson": 28046, "maturity": 28047, "!!!\"": 28048, "copter": 28049, "isha": 28050, "bli": 28051, "wilmington": 28052, "tryouts": 28053, "thai": 28054, "ðŁ„³": 28055, "pebble": 28056, "kraft": 28057, "fp": 28058, "º": 28059, "ssively": 28060, "livin": 28061, "contestants": 28062, "textures": 28063, "joan": 
28064, "hdr": 28065, "filmfestival": 28066, "provence": 28067, "wido": 28068, "opend": 28069, "csi": 28070, "stown": 28071, "croati": 28072, "adjust": 28073, "hostile": 28074, "analysts": 28075, "ilan": 28076, "cuppa": 28077, "brum": 28078, "newfoundland": 28079, "goodwin": 28080, "mett": 28081, "mallorca": 28082, "plugs": 28083, "buk": 28084, "bbhutto": 28085, "wrestle": 28086, "saire": 28087, "shopped": 28088, "forza": 28089, "lehead": 28090, "vivo": 28091, "bast": 28092, "roxy": 28093, "regis": 28094, "hardworking": 28095, "honolulu": 28096, "despair": 28097, "youngsters": 28098, "nig": 28099, "impromp": 28100, "rolltide": 28101, "deemed": 28102, "treason": 28103, "rushed": 28104, "forged": 28105, "fff": 28106, "pikachu": 28107, "briggs": 28108, "doit": 28109, "accent": 28110, "laus": 28111, "glaze": 28112, "competent": 28113, "aho": 28114, "photog": 28115, "midfield": 28116, "lego": 28117, "harvard": 28118, "minorities": 28119, "reilly": 28120, "sliced": 28121, "onceupon": 28122, "initially": 28123, "financially": 28124, "landscapephotography": 28125, "hardro": 28126, "quo": 28127, "mmers": 28128, "parkinson": 28129, "smugg": 28130, "readiness": 28131, "brutally": 28132, "gloucester": 28133, "mped": 28134, "bbhuttozardari": 28135, "murder": 28136, "yed": 28137, "dataviz": 28138, "srt": 28139, "downing": 28140, "bians": 28141, "mü": 28142, "fleck": 28143, "flipped": 28144, "sly": 28145, "brilliance": 28146, "rim": 28147, "kum": 28148, "bubba": 28149, "koi": 28150, "knitted": 28151, "sorg": 28152, "mais": 28153, "ðŁĮ²": 28154, "tiss": 28155, "sustain": 28156, "sensu": 28157, "akhan": 28158, "ziest": 28159, "examines": 28160, "chardonnay": 28161, "username": 28162, "shortlist": 28163, "rebs": 28164, "ono": 28165, "daring": 28166, "hardwood": 28167, "cheque": 28168, "righteous": 28169, "lightening": 28170, "dirk": 28171, "shradd": 28172, "dura": 28173, "downstairs": 28174, "shal": 28175, "amigos": 28176, "ruff": 28177, "slaw": 28178, "ries": 28179, "rednation": 
28180, "manus": 28181, "ðŁĩ§ðŁĩ·": 28182, "distinction": 28183, "ubun": 28184, "duran": 28185, "migra": 28186, "thians": 28187, "laver": 28188, "domestic": 28189, "kx": 28190, "jazzy": 28191, "justify": 28192, "belonging": 28193, "insulation": 28194, "colorstv": 28195, "drunken": 28196, "channeling": 28197, "quand": 28198, "xiii": 28199, "enlighten": 28200, "kano": 28201, "fatima": 28202, "teenchoice": 28203, "terrified": 28204, "pba": 28205, "asley": 28206, "metmuseum": 28207, "dune": 28208, "packer": 28209, "kio": 28210, "ðŁēľðŁēľ": 28211, "boiler": 28212, "fascism": 28213, "armored": 28214, "backgrounds": 28215, "inmates": 28216, "embarrassed": 28217, "defines": 28218, "thd": 28219, "wego": 28220, "silicone": 28221, "loon": 28222, "elding": 28223, "borrowed": 28224, "hemp": 28225, "aksh": 28226, "kawasaki": 28227, "bry": 28228, "deaf": 28229, "killer": 28230, "disposal": 28231, "ðŁĩ°": 28232, "glastonbury": 28233, "uncovered": 28234, "oxide": 28235, "poff": 28236, "dant": 28237, "kj": 28238, "kuro": 28239, "drizzle": 28240, "peoples": 28241, "fee": 28242, "propri": 28243, "ddlovato": 28244, "piggy": 28245, "otis": 28246, "allergies": 28247, "ubis": 28248, "penguin": 28249, "sera": 28250, "viz": 28251, "prosperous": 28252, "icides": 28253, "tornadoes": 28254, "senegal": 28255, "webcast": 28256, "stored": 28257, "enchanted": 28258, "bbcone": 28259, "bayarea": 28260, "entrepreneurial": 28261, "rednationrising": 28262, "experimenting": 28263, "angan": 28264, "lotto": 28265, "theyre": 28266, "pore": 28267, "erp": 28268, "serene": 28269, "eastwood": 28270, "brokers": 28271, "barge": 28272, "stallion": 28273, "timberlake": 28274, "tailored": 28275, "dystop": 28276, "bate": 28277, "lators": 28278, "dixit": 28279, "branson": 28280, "dynamo": 28281, "kylie": 28282, "shameful": 28283, "btwn": 28284, "springtime": 28285, "mixture": 28286, "sounded": 28287, "luton": 28288, "dades": 28289, "mala": 28290, "opra": 28291, "enic": 28292, "rahulgandhi": 28293, "sewer": 28294, 
"~~~~": 28295, "kyu": 28296, "northeastern": 28297, "caer": 28298, "bcu": 28299, "nirvana": 28300, "kitchens": 28301, "ousy": 28302, "alm": 28303, "riverdale": 28304, "hidden": 28305, "flint": 28306, "spd": 28307, "patrons": 28308, "katyperry": 28309, "augh": 28310, "exhibitions": 28311, "smc": 28312, "shuts": 28313, "atore": 28314, "dain": 28315, "something": 28316, "berth": 28317, "bog": 28318, "porter": 28319, "gento": 28320, "concussion": 28321, "anglic": 28322, "rowe": 28323, "grilling": 28324, "scarlett": 28325, "mastering": 28326, "mornin": 28327, "commented": 28328, "sime": 28329, "sizing": 28330, "christy": 28331, "ceos": 28332, "stm": 28333, "atry": 28334, "tariffs": 28335, "vacation": 28336, "prejudice": 28337, "psu": 28338, "parental": 28339, "farage": 28340, "cana": 28341, "capcom": 28342, "kosovo": 28343, "youre": 28344, "menstru": 28345, "stalin": 28346, "grapefruit": 28347, "bran": 28348, "chesa": 28349, "daven": 28350, "excel": 28351, "!!)": 28352, "à¹Į": 28353, "distributor": 28354, "cea": 28355, "bridesma": 28356, "millennial": 28357, "wain": 28358, "observing": 28359, "misery": 28360, "planetary": 28361, "exposing": 28362, "braised": 28363, "compton": 28364, "dongha": 28365, "ql": 28366, "springsteen": 28367, "thul": 28368, "sylve": 28369, "cabo": 28370, "palad": 28371, "nielsen": 28372, "gazing": 28373, "baja": 28374, "roud": 28375, "orchids": 28376, "johannesburg": 28377, "seman": 28378, "dji": 28379, "operative": 28380, "affection": 28381, "eclectic": 28382, "atc": 28383, "mutant": 28384, "awx": 28385, "nice": 28386, "melbourne": 28387, "indulg": 28388, "tulip": 28389, "diaspora": 28390, "welp": 28391, "biggie": 28392, "mississauga": 28393, "retriever": 28394, "oran": 28395, "tammy": 28396, "cta": 28397, "hippo": 28398, "seasoned": 28399, "germans": 28400, "engv": 28401, "marvellous": 28402, "imf": 28403, "relays": 28404, "montan": 28405, "mauriti": 28406, "meister": 28407, "assurance": 28408, "reigning": 28409, "sufficient": 28410, "hane": 
28411, "nothing": 28412, "posse": 28413, "navy": 28414, "inlove": 28415, "brighton": 28416, "enqu": 28417, "chung": 28418, "sweaty": 28419, "esc": 28420, "caled": 28421, "mans": 28422, "nicaragua": 28423, "slices": 28424, "mocha": 28425, "washingtonpost": 28426, "bbn": 28427, "damned": 28428, "growing": 28429, "enburg": 28430, "loan": 28431, "mes": 28432, "whoops": 28433, "believers": 28434, "spiel": 28435, "vodaf": 28436, "lat": 28437, "sled": 28438, "cricketer": 28439, "browne": 28440, "golfers": 28441, "barra": 28442, "watchers": 28443, "luigi": 28444, "swamy": 28445, "moms": 28446, "pitched": 28447, "santor": 28448, "crs": 28449, "sire": 28450, "scamp": 28451, "bode": 28452, "stewar": 28453, "jonny": 28454, "entity": 28455, "pacqui": 28456, "mindful": 28457, "minindia": 28458, "bearded": 28459, "tempt": 28460, "scorpion": 28461, "eaton": 28462, "authorized": 28463, "arto": 28464, "svp": 28465, "opathy": 28466, "cchini": 28467, "housemusic": 28468, "disneyworld": 28469, "âĢĶ@": 28470, "propose": 28471, "diy": 28472, "expense": 28473, "teng": 28474, "puppets": 28475, "smel": 28476, "daca": 28477, "perry": 28478, "finn": 28479, "boosting": 28480, "leftovers": 28481, "cougs": 28482, "satellites": 28483, "many": 28484, "aze": 28485, "gong": 28486, "fie": 28487, "methodo": 28488, "ferries": 28489, "ð٤Ķð٤Ķ": 28490, "explorers": 28491, "loader": 28492, "attracted": 28493, "ilton": 28494, "goddamn": 28495, "piazza": 28496, "doctr": 28497, "saving": 28498, "paragraph": 28499, "visualization": 28500, "mayors": 28501, "workflow": 28502, "ackles": 28503, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 28504, "स": 28505, "twerk": 28506, "clut": 28507, "lover": 28508, "teases": 28509, "sian": 28510, "ote": 28511, "deterior": 28512, "accord": 28513, "lfw": 28514, "swarovski": 28515, "natal": 28516, "traps": 28517, "kina": 28518, "analyze": 28519, "layered": 28520, "beverages": 28521, "unit": 28522, "ransom": 28523, "peshaw": 28524, "destined": 28525, "astrology": 28526, "sipping": 28527, 
"mileycyrus": 28528, "camino": 28529, "marshmallow": 28530, "bliss": 28531, "outback": 28532, "faq": 28533, "intoler": 28534, "humility": 28535, "poppin": 28536, "halloween": 28537, "montene": 28538, "ophy": 28539, "nun": 28540, "tattooed": 28541, "aas": 28542, "ðŁĮ³": 28543, "daley": 28544, "quality": 28545, "dusa": 28546, "fishermen": 28547, "swif": 28548, "terrac": 28549, "stau": 28550, "lein": 28551, "trolling": 28552, "shipment": 28553, "gardener": 28554, "marchmadness": 28555, "headband": 28556, "grt": 28557, "burnett": 28558, "wand": 28559, "!!!!!!!!!": 28560, "ghe": 28561, "dux": 28562, "hud": 28563, "warner": 28564, "ðŁĩ¦": 28565, "exile": 28566, "rescue": 28567, "rata": 28568, "dhan": 28569, "ducati": 28570, "drown": 28571, "blends": 28572, "spie": 28573, "alligator": 28574, "simultaneously": 28575, "brooke": 28576, "uke": 28577, "khar": 28578, "communion": 28579, "rika": 28580, "fordfc": 28581, "chinatown": 28582, "yourown": 28583, "mey": 28584, "canal": 28585, "systematic": 28586, "depri": 28587, "oxford": 28588, "anil": 28589, "wut": 28590, "equation": 28591, "bez": 28592, "fleur": 28593, "thegood": 28594, "langley": 28595, "adity": 28596, "edith": 28597, "alfie": 28598, "оÑĤ": 28599, "encry": 28600, "brill": 28601, "exemp": 28602, "cesar": 28603, "mbling": 28604, "abri": 28605, "scicom": 28606, "jing": 28607, "schooling": 28608, "mika": 28609, "mechanisms": 28610, "impromptu": 28611, "rhea": 28612, "moore": 28613, "crimea": 28614, "besto": 28615, "wright": 28616, "elders": 28617, "rods": 28618, "kamal": 28619, "folklore": 28620, "beet": 28621, "minion": 28622, "relieve": 28623, "thro": 28624, "teamusa": 28625, "pascal": 28626, "madewith": 28627, "bolivia": 28628, "itti": 28629, "freebies": 28630, "desired": 28631, "bestselling": 28632, "liness": 28633, "laden": 28634, "keane": 28635, "mists": 28636, "hippie": 28637, "attachment": 28638, "@/": 28639, "sew": 28640, "flanagan": 28641, "âĿĹï¸ı": 28642, "supremac": 28643, "stlcards": 28644, "sias": 28645, 
"qu": 28646, "rhys": 28647, "steep": 28648, "valleys": 28649, "vw": 28650, "paving": 28651, "dispat": 28652, "alison": 28653, "porte": 28654, "idu": 28655, "newsc": 28656, "socket": 28657, "mos": 28658, "costar": 28659, "revo": 28660, "proteins": 28661, "stanleycup": 28662, "mcal": 28663, "earring": 28664, "secs": 28665, "mclean": 28666, "capric": 28667, "nickelo": 28668, "aden": 28669, "vc": 28670, "shouse": 28671, "adaptive": 28672, "maximize": 28673, "entertainer": 28674, "prose": 28675, "griffi": 28676, "sixteen": 28677, "lamar": 28678, "mirage": 28679, "saudiarabia": 28680, "aweather": 28681, "rust": 28682, "infiltr": 28683, "fashionweek": 28684, "ðŁĺĬðŁĺĬðŁĺĬ": 28685, "selective": 28686, "bubble": 28687, "aden": 28688, "fennel": 28689, "decisive": 28690, "mta": 28691, "mocking": 28692, "mbles": 28693, "stamp": 28694, "mule": 28695, "bernardo": 28696, "grin": 28697, "pott": 28698, "jingle": 28699, "vettel": 28700, "colombian": 28701, "camo": 28702, "motivationmonday": 28703, "bahan": 28704, "ply": 28705, "dhary": 28706, "kami": 28707, "xmen": 28708, "sleeper": 28709, "gara": 28710, "mysti": 28711, "confidential": 28712, "conflicts": 28713, "pneu": 28714, "ces": 28715, "insurtech": 28716, "cleanse": 28717, "merely": 28718, "vais": 28719, "tux": 28720, "thegreat": 28721, "sharon": 28722, "maj": 28723, "hola": 28724, "ecosystems": 28725, "ajay": 28726, "aaj": 28727, "hush": 28728, "harmon": 28729, "backtoschool": 28730, "wikileaks": 28731, "reflected": 28732, "ðŁĺĵ": 28733, "commemorating": 28734, "acet": 28735, "buckingham": 28736, "messiah": 28737, "tuous": 28738, "hornet": 28739, "tobe": 28740, "dq": 28741, "heine": 28742, "mig": 28743, "plate": 28744, "nicholson": 28745, "spie": 28746, "cumberland": 28747, "normal": 28748, "phobia": 28749, "happyhalloween": 28750, "cityfc": 28751, "mcel": 28752, "gillian": 28753, "keto": 28754, "lude": 28755, "demise": 28756, "suga": 28757, "strate": 28758, "mcgrath": 28759, "visitscotland": 28760, "fooled": 28761, "cbr": 
28762, "gcse": 28763, "colori": 28764, "potd": 28765, "missuniverse": 28766, "finances": 28767, "mapoli": 28768, "forks": 28769, "ؓ": 28770, "cannon": 28771, "medicinal": 28772, "ðŁĹĵ": 28773, "kho": 28774, "wreck": 28775, "panto": 28776, "bagel": 28777, "gull": 28778, "syndicate": 28779, "icy": 28780, "prc": 28781, "kien": 28782, "zika": 28783, "tish": 28784, "peta": 28785, "cco": 28786, "liza": 28787, "chut": 28788, "extraction": 28789, "elg": 28790, "gli": 28791, "fueled": 28792, "posit": 28793, "respectively": 28794, "leicester": 28795, "brink": 28796, "vulnerability": 28797, "imported": 28798, "esha": 28799, "ð٦ħ": 28800, "rural": 28801, "rell": 28802, "gaming": 28803, "atlantic": 28804, "abandon": 28805, "noah": 28806, "resolved": 28807, "prostate": 28808, "allergic": 28809, "psd": 28810, "âĺ¹": 28811, "dungeon": 28812, "fangirl": 28813, "illuminated": 28814, "mhs": 28815, "whitesox": 28816, "dently": 28817, "cko": 28818, "endorse": 28819, "overly": 28820, "dazzling": 28821, "prioriti": 28822, "nightlife": 28823, "util": 28824, "behave": 28825, "flamen": 28826, "eastbound": 28827, "ðŁēŁ": 28828, "iloveyou": 28829, "govuk": 28830, "mozambique": 28831, "allegi": 28832, "dri": 28833, "testimonial": 28834, "aths": 28835, "ì§Ģ": 28836, "mmy": 28837, "shabby": 28838, "prosecco": 28839, "friendships": 28840, "calam": 28841, "damages": 28842, "offset": 28843, "jurassic": 28844, "juno": 28845, "arrell": 28846, "ðŁē©": 28847, "interventions": 28848, "daredevil": 28849, "carver": 28850, "runaway": 28851, "rane": 28852, "trustees": 28853, "haute": 28854, "depths": 28855, "ðŁİŃ": 28856, "mein": 28857, "sacrifices": 28858, "concier": 28859, "nesting": 28860, "izzy": 28861, "metam": 28862, "ilovemy": 28863, "urine": 28864, "dulu": 28865, "malhotra": 28866, "veins": 28867, "nightly": 28868, "coat": 28869, "andi": 28870, "hewitt": 28871, "lonel": 28872, "cible": 28873, "write": 28874, "jennie": 28875, "santac": 28876, "ĸï¸ı": 28877, "strato": 28878, "singapore": 28879, 
"soprano": 28880, "kristen": 28881, "cheerful": 28882, "fleetwood": 28883, "fairi": 28884, "meli": 28885, "wast": 28886, "turnt": 28887, "sforsale": 28888, "scrolling": 28889, "angelina": 28890, "rendition": 28891, "jericho": 28892, "nicky": 28893, "orb": 28894, "flavo": 28895, "patriot": 28896, "asheville": 28897, "sickness": 28898, "refund": 28899, "aggression": 28900, "bpl": 28901, "ãĄĄ": 28902, "elusive": 28903, "thistory": 28904, "hanger": 28905, "buffs": 28906, "villas": 28907, "atkinson": 28908, "sph": 28909, "jait": 28910, "declined": 28911, "wok": 28912, "supremacy": 28913, "ootball": 28914, "eyang": 28915, "ðŁİĵ": 28916, "sford": 28917, "athi": 28918, "consume": 28919, "roadster": 28920, "eso": 28921, "upro": 28922, "recipe": 28923, "auf": 28924, "uci": 28925, "aron": 28926, "oooh": 28927, "csgo": 28928, "reich": 28929, "mcd": 28930, "minute": 28931, "ladies": 28932, "punk": 28933, "rutgers": 28934, "meek": 28935, "arizon": 28936, "taj": 28937, "landlord": 28938, "degra": 28939, "autumn": 28940, "lynx": 28941, "usf": 28942, "bhi": 28943, "fairytale": 28944, "donghae": 28945, "betsy": 28946, "exploded": 28947, "chennai": 28948, "opa": 28949, "protag": 28950, "brant": 28951, "ðŁĵ°:": 28952, "gf": 28953, "palli": 28954, "ðŁı¼âĢįâĻĢï¸ı": 28955, "sut": 28956, "illini": 28957, "columnist": 28958, "shirtless": 28959, "decentr": 28960, "searched": 28961, "ecor": 28962, "buggy": 28963, "sack": 28964, "ðŁĺĤðŁĺŃ": 28965, "det": 28966, "theri": 28967, "ornaments": 28968, "bringback": 28969, "tov": 28970, "quarterfinals": 28971, "iche": 28972, "constra": 28973, "gier": 28974, "buchanan": 28975, "vix": 28976, "kayaking": 28977, "mustread": 28978, "swallow": 28979, "melb": 28980, "scaf": 28981, "opal": 28982, "mayoral": 28983, "harat": 28984, "ð٦ĭ": 28985, "schedules": 28986, "idf": 28987, "hague": 28988, "roz": 28989, "aah": 28990, "dmc": 28991, "duplic": 28992, "cache": 28993, "orphan": 28994, "fracture": 28995, "recon": 28996, "chav": 28997, "bunnies": 28998, 
"alain": 28999, "mustafa": 29000, "ðŁİĻ": 29001, "vacations": 29002, "dynamite": 29003, "texted": 29004, "broadcaster": 29005, "ðŁē£": 29006, "steamed": 29007, "rocker": 29008, "dietary": 29009, "luxurytravel": 29010, "inaugurated": 29011, "sawards": 29012, "vaughn": 29013, "lincolnshire": 29014, "clicked": 29015, "kraja": 29016, "fanc": 29017, "removes": 29018, "layoffs": 29019, "mcfar": 29020, "breeds": 29021, "winnie": 29022, "jonghyun": 29023, "incentive": 29024, "variations": 29025, "patton": 29026, "aturday": 29027, "persistent": 29028, "prun": 29029, "piers": 29030, "dales": 29031, "æĸ": 29032, "breastfeeding": 29033, "rance": 29034, "tawa": 29035, "Ĥâĸ": 29036, "murdoch": 29037, "captive": 29038, "thistle": 29039, "nica": 29040, "commodity": 29041, "couldnt": 29042, "boardwalk": 29043, "gracious": 29044, "practitioners": 29045, "ngc": 29046, "scrum": 29047, "nero": 29048, "camouflage": 29049, "colon": 29050, "hei": 29051, "physicist": 29052, "saturdaymorning": 29053, "tener": 29054, "siwon": 29055, "columns": 29056, "brune": 29057, "yvr": 29058, "bair": 29059, "retires": 29060, "halam": 29061, "caber": 29062, "shazam": 29063, "minu": 29064, "cascade": 29065, "milkshake": 29066, "grid": 29067, "dren": 29068, "vincent": 29069, "sodium": 29070, "platter": 29071, "cheerleader": 29072, "chenko": 29073, "yak": 29074, "eliminated": 29075, "typo": 29076, "yman": 29077, "rethink": 29078, "âĿĹ": 29079, "tsville": 29080, "bernardokath": 29081, "extr": 29082, "ðŁĺģðŁĺģðŁĺģ": 29083, "tao": 29084, "reper": 29085, "moths": 29086, "empowered": 29087, "citing": 29088, "transported": 29089, "monks": 29090, "sanat": 29091, "clears": 29092, "bachelorette": 29093, "campbell": 29094, "rachael": 29095, "harle": 29096, "handler": 29097, "climbs": 29098, "interference": 29099, "release": 29100, "shand": 29101, "rbs": 29102, "hrh": 29103, "ãģª": 29104, "valle": 29105, "ré": 29106, "slime": 29107, "wakes": 29108, "chubby": 29109, "sloan": 29110, "elves": 29111, "athen": 29112, 
"attorneys": 29113, "microscope": 29114, "stoner": 29115, "scaling": 29116, "obe": 29117, "cout": 29118, "seman": 29119, "midweek": 29120, "balsam": 29121, "ðŁĺįâĿ¤": 29122, "tiful": 29123, "vish": 29124, "lotta": 29125, "ripping": 29126, "remn": 29127, "tire": 29128, "leap": 29129, "havent": 29130, "laby": 29131, "himach": 29132, "whispers": 29133, "wein": 29134, "ðŁİ¸": 29135, "wildflowers": 29136, "sele": 29137, "ucc": 29138, "liability": 29139, "azine": 29140, "swings": 29141, "kya": 29142, "tair": 29143, "remain": 29144, "edo": 29145, "flops": 29146, "pocket": 29147, "grandad": 29148, "examiner": 29149, "gris": 29150, "ffect": 29151, "ðŁijĬðŁı»": 29152, "studded": 29153, "heartbeat": 29154, "deacon": 29155, "firmly": 29156, "infectious": 29157, "stef": 29158, "outlines": 29159, "leasing": 29160, "claws": 29161, "sense": 29162, "tabs": 29163, "hoot": 29164, "mosul": 29165, "spawn": 29166, "coa": 29167, "hogwarts": 29168, "vein": 29169, "albania": 29170, "manuel": 29171, "bino": 29172, "vauxhall": 29173, "scotland": 29174, "gobucks": 29175, "matty": 29176, "physio": 29177, "torino": 29178, "constable": 29179, "investigated": 29180, "slower": 29181, "mistaken": 29182, "bayer": 29183, "wildfires": 29184, "voic": 29185, "xon": 29186, "timeto": 29187, "chassis": 29188, "barric": 29189, "pion": 29190, "baldhead": 29191, "wook": 29192, "registr": 29193, "drafts": 29194, "bhs": 29195, "ligue": 29196, "lick": 29197, "staffordshire": 29198, "bafta": 29199, "darry": 29200, "jeanne": 29201, "vending": 29202, "corp": 29203, "âĽ³ï¸ı": 29204, "kiddos": 29205, "fenway": 29206, "cao": 29207, "westbound": 29208, "ðŁĺĻ": 29209, "dvr": 29210, "quicker": 29211, "blah": 29212, "goodie": 29213, "ðŁēĭðŁēĭ": 29214, "vox": 29215, "esper": 29216, "facade": 29217, "correlation": 29218, "redbull": 29219, "roup": 29220, "declining": 29221, "chive": 29222, "mcgee": 29223, "turo": 29224, "inder": 29225, "feller": 29226, "fug": 29227, "ilysm": 29228, "mardi": 29229, "peshawar": 29230, 
"kieran": 29231, "inema": 29232, "meatballs": 29233, "peck": 29234, "depressing": 29235, "sensing": 29236, "giz": 29237, "ddington": 29238, "springwatch": 29239, "roaming": 29240, "yellowstone": 29241, "horseshoe": 29242, "amman": 29243, "weekday": 29244, "olor": 29245, "ðŁ„°": 29246, "boosts": 29247, "sprint": 29248, "scarves": 29249, "jee": 29250, "beetro": 29251, "clan": 29252, "allthe": 29253, "ìĦ¸ë": 29254, "enlightenment": 29255, "adobe": 29256, "regeneration": 29257, "?@": 29258, "contag": 29259, "yachts": 29260, "tou": 29261, "mora": 29262, "envoy": 29263, "rani": 29264, "goli": 29265, "dhanushkraja": 29266, "woodworking": 29267, "strengths": 29268, "sedi": 29269, "discs": 29270, "arina": 29271, "scon": 29272, "lite": 29273, "another": 29274, "ðŁ„Ĭ": 29275, "yemen": 29276, "guern": 29277, "savvy": 29278, "loyed": 29279, "biomed": 29280, "heartbreak": 29281, "comrades": 29282, "millie": 29283, "patch": 29284, "unf": 29285, "jarvis": 29286, "blaming": 29287, "commemoration": 29288, "gey": 29289, "Ą": 29290, "cardiovascular": 29291, "aligned": 29292, "document": 29293, ".?": 29294, "aesthetics": 29295, "emu": 29296, "theirs": 29297, "leh": 29298, "psic": 29299, "sif": 29300, "plateau": 29301, "expend": 29302, "dominating": 29303, "robes": 29304, "mauritius": 29305, "exceptionally": 29306, "homer": 29307, "discoveries": 29308, "braun": 29309, "tennant": 29310, "insulin": 29311, "ðŁİ®": 29312, "carbs": 29313, "teas": 29314, "?!\"": 29315, "zie": 29316, "francois": 29317, "browsing": 29318, "thol": 29319, "clarence": 29320, "helper": 29321, "obtained": 29322, "cassie": 29323, "lees": 29324, "!,": 29325, "pomegran": 29326, "hubs": 29327, "prestige": 29328, "][": 29329, "macher": 29330, "bottled": 29331, "punch": 29332, "pipe": 29333, "och": 29334, "gallons": 29335, "deliveries": 29336, "ura": 29337, "unday": 29338, "monde": 29339, "depicts": 29340, "regency": 29341, "outrageous": 29342, "khaled": 29343, "caro": 29344, "hearti": 29345, "zag": 29346, 
"developmental": 29347, "overcoming": 29348, "statistical": 29349, "flavored": 29350, "fords": 29351, "creatives": 29352, "laurence": 29353, "dias": 29354, "sunscreen": 29355, "inked": 29356, "preacher": 29357, "nul": 29358, "impacting": 29359, "autistic": 29360, "âļĶï¸ı": 29361, "oss": 29362, "pelicans": 29363, "celeste": 29364, "vb": 29365, "rump": 29366, "mcgra": 29367, "fairfax": 29368, "humor": 29369, "bbcnews": 29370, "rowling": 29371, "calder": 29372, "seamless": 29373, "agne": 29374, "pti": 29375, "mixed": 29376, "tshirts": 29377, "merci": 29378, "btob": 29379, "womeninstem": 29380, "genealogy": 29381, "preven": 29382, "lour": 29383, "cradle": 29384, "giuse": 29385, "о": 29386, "chrono": 29387, "fairness": 29388, "chocolate": 29389, "tory": 29390, "asda": 29391, "prescott": 29392, "stretched": 29393, "alman": 29394, "uil": 29395, "recharge": 29396, "intre": 29397, "obst": 29398, "hospital": 29399, "hayward": 29400, "tenerife": 29401, "friedman": 29402, "vaping": 29403, "confessions": 29404, "yeah": 29405, "balli": 29406, "lucknow": 29407, "corpse": 29408, "sculptor": 29409, "ampton": 29410, "tpp": 29411, "indicates": 29412, "surplus": 29413, "truman": 29414, "ðĿĻ": 29415, "sinha": 29416, "invo": 29417, "sovereign": 29418, "kev": 29419, "establishing": 29420, "engraved": 29421, "assuming": 29422, "ðŁıģ": 29423, "souza": 29424, "fabi": 29425, "toned": 29426, "ounge": 29427, "deloit": 29428, "downey": 29429, "noble": 29430, "omor": 29431, "cartridge": 29432, "ðŁıIJ": 29433, "uhur": 29434, "holloway": 29435, "successes": 29436, "rsa": 29437, "âĦ¢": 29438, "mazz": 29439, "twd": 29440, "discourse": 29441, ".<": 29442, "yat": 29443, "satisfy": 29444, "compri": 29445, "ह": 29446, "graphite": 29447, "dissertation": 29448, "arter": 29449, "ƭĶ": 29450, "bally": 29451, "zombi": 29452, "lyons": 29453, "aic": 29454, "ubc": 29455, "prada": 29456, "eil": 29457, "dax": 29458, "clai": 29459, "granddaughter": 29460, "extravaganza": 29461, "challenge": 29462, "ð٤ŀ": 29463, 
"pover": 29464, "primarily": 29465, "daddy": 29466, "mana": 29467, "bikers": 29468, "inquiries": 29469, "daun": 29470, "feline": 29471, "generative": 29472, "hef": 29473, "benefiting": 29474, "lindsey": 29475, "polka": 29476, "demonstrated": 29477, "alle": 29478, "randy": 29479, "osu": 29480, "lowkey": 29481, "weirdest": 29482, "redbull": 29483, "oury": 29484, "nous": 29485, "woodstock": 29486, "credenti": 29487, "nicer": 29488, "gado": 29489, "alyss": 29490, "aph": 29491, "preparedness": 29492, "stationary": 29493, "incorporated": 29494, "dyer": 29495, "saratoga": 29496, "celesti": 29497, ":\"": 29498, "antibiotics": 29499, "orgs": 29500, "indefin": 29501, "apron": 29502, "иÐ": 29503, "fifteen": 29504, "nof": 29505, "ðŁĶĿ": 29506, "phx": 29507, "tega": 29508, "mz": 29509, "organizational": 29510, "onair": 29511, "bandung": 29512, "pleasures": 29513, "mori": 29514, "secretari": 29515, "raccoon": 29516, "cashi": 29517, "pilates": 29518, "kon": 29519, "geoffrey": 29520, "lao": 29521, "kamp": 29522, "departments": 29523, "backpacking": 29524, "anam": 29525, "ë": 29526, "crackdown": 29527, "aunty": 29528, "ondo": 29529, "lizzie": 29530, "phers": 29531, "cun": 29532, "ðŁĩ±": 29533, "kpop": 29534, "put": 29535, "intentional": 29536, "connolly": 29537, "barclays": 29538, "hsfb": 29539, "swindon": 29540, "uku": 29541, "sally": 29542, "aint": 29543, "âľħ": 29544, "penang": 29545, "uplifting": 29546, "epilepsy": 29547, "interro": 29548, "bungal": 29549, "goku": 29550, "blueberries": 29551, "द": 29552, "ussia": 29553, "silky": 29554, "moured": 29555, "istic": 29556, "briefs": 29557, "meats": 29558, "gob": 29559, "chaser": 29560, "statewide": 29561, "prasad": 29562, "glitch": 29563, "arin": 29564, "banff": 29565, "member": 29566, "ðŁĺŃâĿ¤ï¸ı": 29567, "loving": 29568, "halla": 29569, "Ć ĀøĀ”": 29570, "smokers": 29571, "yaku": 29572, "scicomm": 29573, "physio": 29574, "swol": 29575, "lemons": 29576, "gelato": 29577, "chool": 29578, "capitals": 29579, "kistan": 29580, "tights": 
29581, "spikes": 29582, "travellers": 29583, "iklan": 29584, "commissioning": 29585, "arine": 29586, "emabiggestfans": 29587, "emphasis": 29588, "frontline": 29589, "paddock": 29590, "destructive": 29591, "baha": 29592, "linger": 29593, "jewish": 29594, "shetland": 29595, "mcgin": 29596, "monkey": 29597, "koz": 29598, "sone": 29599, "rajini": 29600, "teh": 29601, "yen": 29602, "cvs": 29603, "masquer": 29604, "girly": 29605, "wesle": 29606, "wasnt": 29607, "brody": 29608, "terminator": 29609, "gille": 29610, "maggi": 29611, "birdie": 29612, "jeopardy": 29613, "cubic": 29614, "vmware": 29615, "intricate": 29616, "anup": 29617, "topia": 29618, "easton": 29619, "sabres": 29620, "investigates": 29621, "busting": 29622, "bilingual": 29623, "valentino": 29624, "informat": 29625, "ferre": 29626, "adventur": 29627, "hydrate": 29628, "forsy": 29629, "aziz": 29630, "santo": 29631, "ede": 29632, "whistler": 29633, "continuously": 29634, "dham": 29635, "unused": 29636, "jihad": 29637, "addictive": 29638, "vidy": 29639, "dob": 29640, "ido": 29641, "fied": 29642, "niversary": 29643, "none": 29644, "fuer": 29645, "ðŁĺįðŁĺĺ": 29646, "covenant": 29647, "printable": 29648, "immaculate": 29649, "oem": 29650, "clt": 29651, "servants": 29652, "consumed": 29653, "unreleased": 29654, "scum": 29655, "packaged": 29656, "mere": 29657, "ìĦ¸ë¸": 29658, "toby": 29659, "taf": 29660, "spoons": 29661, "meal": 29662, "fball": 29663, "fairfield": 29664, "janet": 29665, "silverstone": 29666, "dartmouth": 29667, "followme": 29668, "voyager": 29669, "kombat": 29670, "anniver": 29671, "enew": 29672, "magdal": 29673, "hove": 29674, "sath": 29675, "grizzly": 29676, "cardi": 29677, "gartner": 29678, "sandy": 29679, "kanye": 29680, "posture": 29681, "poign": 29682, "impulse": 29683, "radiology": 29684, "horizons": 29685, "siam": 29686, "aishwar": 29687, "==>": 29688, "noche": 29689, "tris": 29690, "elyn": 29691, "comme": 29692, "dui": 29693, "cec": 29694, "councillors": 29695, "cuddling": 29696, "creeping": 
29697, "locke": 29698, "manages": 29699, "transferred": 29700, "necks": 29701, "dier": 29702, "dano": 29703, "vick": 29704, "lunches": 29705, "dhe": 29706, "ensures": 29707, "criss": 29708, "ulster": 29709, "bannon": 29710, "contenders": 29711, "spam": 29712, "sweetness": 29713, "medal": 29714, "honduras": 29715, "arctic": 29716, "ultrasound": 29717, "infr": 29718, "discovers": 29719, "eiffel": 29720, "casters": 29721, "ruben": 29722, "dust": 29723, "aweed": 29724, "atrium": 29725, "lestwe": 29726, "seared": 29727, "ðŁĵº:": 29728, "tyne": 29729, "exchanges": 29730, "littlemix": 29731, "lle": 29732, "astronauts": 29733, "hershey": 29734, "workday": 29735, "knob": 29736, "sov": 29737, "resigns": 29738, "todayshow": 29739, "derman": 29740, "anth": 29741, "afc": 29742, "taster": 29743, "swoo": 29744, "saeed": 29745, "pering": 29746, "narrowly": 29747, "rnli": 29748, "bestbuy": 29749, "panasonic": 29750, "obstacle": 29751, "farmers": 29752, "ðŁİĻ": 29753, "pawan": 29754, "kiest": 29755, "angers": 29756, "absurd": 29757, "ohmy": 29758, "sino": 29759, "pistachi": 29760, "spice": 29761, "giuli": 29762, "primetime": 29763, "kow": 29764, "kens": 29765, "exagger": 29766, "!?!": 29767, "uba": 29768, "middles": 29769, "judd": 29770, "ejec": 29771, "slammed": 29772, "pensions": 29773, "ofa": 29774, "recreate": 29775, "bhp": 29776, "xxl": 29777, "liverpool": 29778, "thresh": 29779, "purity": 29780, "nieu": 29781, "holics": 29782, "wrath": 29783, "rado": 29784, "glio": 29785, "amma": 29786, "dilemma": 29787, "cru": 29788, "letsgo": 29789, "....@": 29790, "âĿĵ": 29791, "suggesting": 29792, "trumps": 29793, "horus": 29794, "fv": 29795, "icom": 29796, "referring": 29797, "predictive": 29798, "tarts": 29799, "gette": 29800, "sock": 29801, "glossy": 29802, "pinky": 29803, "alec": 29804, "thyme": 29805, "oura": 29806, "theroad": 29807, "petr": 29808, "cram": 29809, "pfi": 29810, "dvn": 29811, "meier": 29812, "incentives": 29813, "tunnels": 29814, "mobil": 29815, "recap": 29816, 
"extras": 29817, "upright": 29818, "revamp": 29819, "perseverance": 29820, ",-": 29821, "otp": 29822, "mirror": 29823, "arwx": 29824, "gerry": 29825, "maher": 29826, "gor": 29827, "homepage": 29828, "amis": 29829, "agra": 29830, "madele": 29831, "bestfriend": 29832, "siriusxm": 29833, "bundles": 29834, "admiring": 29835, "tdsb": 29836, "ðŁįģ": 29837, "chas": 29838, "slowing": 29839, "roh": 29840, "wallpapers": 29841, "â̦/": 29842, "tekken": 29843, "gangs": 29844, "tala": 29845, "lindsay": 29846, "shoul": 29847, "linebacker": 29848, "toolkit": 29849, "uranium": 29850, "calyp": 29851, "abrams": 29852, "matthi": 29853, "ðŁı¿": 29854, "honourable": 29855, "dayo": 29856, "versail": 29857, "tank": 29858, "stc": 29859, "fritz": 29860, "splend": 29861, "patag": 29862, "annoyed": 29863, "onday": 29864, "devastated": 29865, "chattanooga": 29866, "nationalism": 29867, "massey": 29868, "jenn": 29869, "tailor": 29870, "devgn": 29871, "organs": 29872, "zucchini": 29873, "onfox": 29874, "satire": 29875, "wexford": 29876, "disgrace": 29877, "noto": 29878, "volta": 29879, "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 29880, "à¶": 29881, "homeowners": 29882, "pointer": 29883, "mcr": 29884, "austen": 29885, "daysto": 29886, "moons": 29887, "palma": 29888, "grazing": 29889, "eso": 29890, "influencers": 29891, "shahidkapoor": 29892, "compliant": 29893, "measurements": 29894, "develops": 29895, "yd": 29896, "parl": 29897, "pvt": 29898, "randolph": 29899, "tortured": 29900, "gerald": 29901, "elias": 29902, "deepikap": 29903, "warmup": 29904, "hickory": 29905, "gap": 29906, "coffin": 29907, "amour": 29908, "reneg": 29909, "mounting": 29910, "sevens": 29911, "igle": 29912, "hier": 29913, "decad": 29914, "tright": 29915, "escapes": 29916, "werner": 29917, "tfl": 29918, "fulfilled": 29919, "niger": 29920, "sourdough": 29921, "reaper": 29922, "chooses": 29923, "spinner": 29924, "weeknd": 29925, "filtered": 29926, "shuk": 29927, "kati": 29928, "oldham": 29929, "opensource": 29930, "khanna": 29931, "atelier": 
29932, "connec": 29933, "ophobic": 29934, "glas": 29935, "complications": 29936, "arson": 29937, "councils": 29938, "smol": 29939, "assy": 29940, "lurking": 29941, "lingui": 29942, "hanks": 29943, "ein": 29944, "ƙħ": 29945, "rugs": 29946, "nguyen": 29947, "nouveau": 29948, "menace": 29949, "lev": 29950, "aladdin": 29951, "ruining": 29952, "roundabout": 29953, "km": 29954, "conor": 29955, "shoops": 29956, "mayday": 29957, "traumatic": 29958, "prabhas": 29959, "kaiser": 29960, "kita": 29961, "router": 29962, "pedro": 29963, "retar": 29964, "stunner": 29965, "spanish": 29966, "disturbed": 29967, "academy": 29968, "elearning": 29969, "witty": 29970, "seng": 29971, "feral": 29972, "avy": 29973, "stab": 29974, "keaton": 29975, "urdu": 29976, "koto": 29977, "hui": 29978, "cooke": 29979, "arian": 29980, "thepersonal": 29981, "uma": 29982, "seap": 29983, "asting": 29984, "rhetoric": 29985, "handwriting": 29986, "municipality": 29987, "consortium": 29988, "ðŁIJŁ": 29989, "glasgow": 29990, "raya": 29991, "eliza": 29992, "polymer": 29993, "broth": 29994, "practi": 29995, "correspondent": 29996, "addicts": 29997, "gayle": 29998, "ailing": 29999, "ofe": 30000, "pli": 30001, "heartw": 30002, "stitch": 30003, "sightings": 30004, "priests": 30005, "samo": 30006, "sloth": 30007, "goodwood": 30008, "rocco": 30009, "sabc": 30010, "summit": 30011, "lace": 30012, "presley": 30013, "itten": 30014, "cincy": 30015, "thepersonalnetwork": 30016, "sweek": 30017, "pegas": 30018, "afcon": 30019, "registry": 30020, "cim": 30021, "leth": 30022, "dicap": 30023, "candice": 30024, "fluent": 30025, "smack": 30026, "pedestri": 30027, "aloud": 30028, "carac": 30029, "priyankach": 30030, "pgh": 30031, "irons": 30032, "dolce": 30033, "latvia": 30034, "deceased": 30035, "therock": 30036, "clap": 30037, "cene": 30038, "foam": 30039, "morrissey": 30040, "gret": 30041, "essentially": 30042, "comcast": 30043, "beagle": 30044, "argues": 30045, "inged": 30046, "-â̦": 30047, "sag": 30048, "hasan": 30049, "ðŁĻĨ": 
30050, "ðŁį°": 30051, "nhra": 30052, "kannada": 30053, "indicators": 30054, "oner": 30055, "brixton": 30056, "atas": 30057, "screenplay": 30058, "sorority": 30059, "shaheed": 30060, "heem": 30061, "classmates": 30062, "tainment": 30063, "esi": 30064, "breastcancer": 30065, "zuckerberg": 30066, "auror": 30067, "encia": 30068, "refers": 30069, "kaeper": 30070, "vortex": 30071, "compart": 30072, "lymph": 30073, "photographing": 30074, "steff": 30075, "restling": 30076, "parsley": 30077, "momento": 30078, "thman": 30079, "lacking": 30080, "dutt": 30081, "oculus": 30082, "fino": 30083, "frenzy": 30084, "rasc": 30085, "dern": 30086, "dismissed": 30087, "nook": 30088, "metgala": 30089, "shill": 30090, "raphael": 30091, "mavericks": 30092, "exhibits": 30093, "eagerly": 30094, "cpa": 30095, "amenities": 30096, ".âłĢ": 30097, "exodus": 30098, "ernst": 30099, "lita": 30100, "dealt": 30101, "womensmarch": 30102, "iain": 30103, "scoreboard": 30104, "campeones": 30105, "cen": 30106, "tiki": 30107, "garrison": 30108, "fidelity": 30109, "brag": 30110, "roadmap": 30111, "psychop": 30112, "loe": 30113, "bleu": 30114, "ðŁijĬðŁı¼": 30115, "sauvi": 30116, "springer": 30117, "temptation": 30118, "rudolph": 30119, "acura": 30120, "wicz": 30121, "parachute": 30122, "strol": 30123, "lenny": 30124, "zik": 30125, "doms": 30126, "nbaf": 30127, "alpac": 30128, "vivian": 30129, "rove": 30130, "preet": 30131, "perpetu": 30132, "snake": 30133, "airsoft": 30134, "inflatable": 30135, "princes": 30136, "atie": 30137, "ffey": 30138, "patient": 30139, "mire": 30140, "chelle": 30141, "slack": 30142, "groovy": 30143, "#:": 30144, "uploading": 30145, "!!!!!!!!!!!!!!!!": 30146, "siemens": 30147, "provision": 30148, "vfx": 30149, "needy": 30150, "fats": 30151, "topoli": 30152, "bhutto": 30153, "sathletics": 30154, "alums": 30155, "twinning": 30156, "southwestern": 30157, "adopting": 30158, "lastnight": 30159, "manne": 30160, "laga": 30161, "twell": 30162, "acia": 30163, "----": 30164, "eyewear": 30165, 
"hurley": 30166, "flee": 30167, "sach": 30168, "pecker": 30169, "costly": 30170, "isk": 30171, "crates": 30172, "policy": 30173, "erosion": 30174, "ingo": 30175, "werk": 30176, "ðŁIJį": 30177, "tortoise": 30178, "therapies": 30179, "internet": 30180, "chihuahua": 30181, "rips": 30182, "frei": 30183, "edor": 30184, "taiji": 30185, "tfc": 30186, "dod": 30187, "dempsey": 30188, "christin": 30189, "cheng": 30190, "hips": 30191, "graeme": 30192, "compassionate": 30193, "cavaliers": 30194, "historic": 30195, "soulful": 30196, "criminal": 30197, "jac": 30198, "vinci": 30199, "expired": 30200, "surat": 30201, "turismo": 30202, "kona": 30203, "seaweed": 30204, "berts": 30205, "leica": 30206, "expressing": 30207, "aal": 30208, "wort": 30209, "breakfast": 30210, "herring": 30211, "amused": 30212, "rhubarb": 30213, "martian": 30214, "cosplayer": 30215, "yash": 30216, "strial": 30217, "raul": 30218, "referral": 30219, "dwts": 30220, "jw": 30221, "adler": 30222, "curtains": 30223, "gur": 30224, "valence": 30225, "tyrone": 30226, "swfc": 30227, "coached": 30228, "reborn": 30229, "diabetic": 30230, "choke": 30231, "norfolk": 30232, "investigative": 30233, "ðŁē¯ðŁē¯": 30234, "zid": 30235, "vmas": 30236, "phie": 30237, "objectives": 30238, "âľĭ": 30239, "overdue": 30240, "divers": 30241, "matsu": 30242, "ðŁİŁï¸ı": 30243, "casualties": 30244, "ว": 30245, "alk": 30246, "standardi": 30247, "realist": 30248, "artifacts": 30249, "pandor": 30250, "kex": 30251, "invin": 30252, "(!)": 30253, "iney": 30254, "paraly": 30255, "mrt": 30256, "faye": 30257, "thevoice": 30258, "onga": 30259, "deed": 30260, "skinner": 30261, "azwx": 30262, "specimen": 30263, "priyankachopra": 30264, "nuevo": 30265, "barkley": 30266, "toulouse": 30267, "resumes": 30268, "footballers": 30269, "citi": 30270, "fetch": 30271, "ère": 30272, "lestweforget": 30273, "ðŁĻĭ": 30274, "chunk": 30275, "drifting": 30276, "manipulation": 30277, "equals": 30278, "putt": 30279, "kyungsoo": 30280, "âĿ¤ï¸ı#": 30281, "elastic": 30282, 
"parano": 30283, "foy": 30284, "doping": 30285, "cincy": 30286, "ssler": 30287, "interrupted": 30288, "alay": 30289, "adores": 30290, "amethy": 30291, "convoy": 30292, "ãĢı": 30293, "Ĭãģ": 30294, "blacklist": 30295, "generals": 30296, "sachin": 30297, "brushed": 30298, "ounces": 30299, "nonstop": 30300, "illiams": 30301, "btsarmy": 30302, "uav": 30303, "ruff": 30304, "burma": 30305, "bik": 30306, "defence": 30307, "schultz": 30308, "boasts": 30309, "loneliness": 30310, "gore": 30311, "transforms": 30312, "alumna": 30313, "@@": 30314, "rappers": 30315, "nehru": 30316, "caro": 30317, "himalayan": 30318, "wearables": 30319, "geh": 30320, "peppermint": 30321, "redevelopment": 30322, "flamingo": 30323, "cosby": 30324, "bigbaldhead": 30325, "agri": 30326, "barefoot": 30327, "scopes": 30328, "regram": 30329, "ghana": 30330, "ðŁİ«": 30331, "iheart": 30332, "sadie": 30333, "carrie": 30334, "microbial": 30335, "kuala": 30336, "skater": 30337, "querque": 30338, "âĻ©": 30339, "genres": 30340, "reasoning": 30341, "chased": 30342, "aso": 30343, "slipped": 30344, "encan": 30345, "vamos": 30346, "kers": 30347, "adverse": 30348, "moil": 30349, "commodities": 30350, "withyou": 30351, "silent": 30352, "hype": 30353, "ande": 30354, "amination": 30355, "whispe": 30356, "litz": 30357, "âļ½ï¸ıâļ½ï¸ı": 30358, "riff": 30359, "ppy": 30360, "lambs": 30361, "ganesh": 30362, "absent": 30363, "regulator": 30364, "marseille": 30365, "enroll": 30366, "parcel": 30367, "wap": 30368, "byrd": 30369, "ðŁĩŃ": 30370, "tuber": 30371, "countrymusic": 30372, "parl": 30373, "controllers": 30374, "responsibilities": 30375, "wey": 30376, "chate": 30377, "montenegro": 30378, "chico": 30379, "milan": 30380, "lms": 30381, "trainees": 30382, "appropriately": 30383, "uncertain": 30384, "poppies": 30385, "edsheeran": 30386, "nutritious": 30387, "garo": 30388, "deutsch": 30389, "awesome": 30390, "ãĄ¼": 30391, "comfortably": 30392, "landmarks": 30393, "eti": 30394, "reusable": 30395, "danielle": 30396, "rosal": 
30397, "coles": 30398, "justic": 30399, "ccs": 30400, "fanny": 30401, "nim": 30402, "mcu": 30403, "clinch": 30404, "atene": 30405, "merge": 30406, "imdb": 30407, "anglo": 30408, "uccino": 30409, "panini": 30410, "annot": 30411, "burberry": 30412, "feature": 30413, "predicting": 30414, "fashionista": 30415, "sask": 30416, "imaginary": 30417, "mmo": 30418, "southsudan": 30419, "spear": 30420, "hubble": 30421, "jointhe": 30422, "coyotes": 30423, "sligo": 30424, "kodak": 30425, "sitcom": 30426, "polaroid": 30427, "rooted": 30428, "corrup": 30429, "ðŁĻĮðŁĻĮ": 30430, "brisban": 30431, "atz": 30432, "ahl": 30433, "remy": 30434, "talent": 30435, "avalon": 30436, "rada": 30437, "pauline": 30438, "locomotive": 30439, "goons": 30440, "nemo": 30441, "maserati": 30442, "icu": 30443, "stutt": 30444, "historically": 30445, "smb": 30446, "presby": 30447, "avoid": 30448, "sooners": 30449, "rhinestone": 30450, "wad": 30451, "rising": 30452, "trot": 30453, "modes": 30454, "regent": 30455, "optimize": 30456, "reece": 30457, "smu": 30458, "verti": 30459, "newyorkcity": 30460, "cortez": 30461, "rac": 30462, "incase": 30463, "sinc": 30464, "fielding": 30465, "etta": 30466, "tiffany": 30467, "almonds": 30468, "saddle": 30469, "krat": 30470, "matter": 30471, "glow": 30472, "starving": 30473, "glo": 30474, "crappy": 30475, "slur": 30476, "std": 30477, "monitors": 30478, "receipt": 30479, "maymayentrata": 30480, "mcil": 30481, "unis": 30482, "rainbows": 30483, "caldwell": 30484, "pacquiao": 30485, "jop": 30486, "afe": 30487, "hook": 30488, "essen": 30489, "wizard": 30490, "median": 30491, "flaws": 30492, "coms": 30493, "âĿĦ": 30494, "ingh": 30495, "haynes": 30496, "antonio": 30497, "templates": 30498, "outer": 30499, "naw": 30500, "cardigan": 30501, "belgrade": 30502, "ðŁēī": 30503, "homo": 30504, "aise": 30505, "ropes": 30506, "nove": 30507, "whatyou": 30508, "trigge": 30509, "conception": 30510, "adukone": 30511, "nadi": 30512, "friars": 30513, "swer": 30514, "adjusted": 30515, "hotline": 
30516, "sanity": 30517, "kaur": 30518, "downloading": 30519, "cgi": 30520, "tenor": 30521, "ethnic": 30522, "appalach": 30523, "Ć ĀøĀø": 30524, "pag": 30525, "golds": 30526, "onset": 30527, "investigator": 30528, "cartel": 30529, "peacefully": 30530, "jarrett": 30531, "catalan": 30532, "polio": 30533, "num": 30534, "frustration": 30535, "dharma": 30536, "mylife": 30537, "âľĮðŁı»": 30538, "aberdeen": 30539, "musa": 30540, "binder": 30541, "sparkly": 30542, "fleeing": 30543, "instinct": 30544, "coping": 30545, "dominance": 30546, "illers": 30547, "era": 30548, "uconn": 30549, "looms": 30550, "livingston": 30551, "gali": 30552, "hes": 30553, "cma": 30554, "bela": 30555, "seley": 30556, "monk": 30557, "lach": 30558, "marx": 30559, "“": 30560, "merica": 30561, "womanin": 30562, "essex": 30563, "raina": 30564, "jimi": 30565, "neptune": 30566, "zack": 30567, "chinese": 30568, "martins": 30569, "chandelier": 30570, "hern": 30571, "withus": 30572, "earl": 30573, "asphalt": 30574, "modules": 30575, "stp": 30576, "ulla": 30577, "psychiatric": 30578, "mileage": 30579, "captivating": 30580, "sider": 30581, "mento": 30582, "mort": 30583, "trance": 30584, "talbot": 30585, "abby": 30586, "ƬĄ": 30587, "âľĮðŁı¼": 30588, "jak": 30589, "dawn": 30590, "turnup": 30591, "screwed": 30592, "feds": 30593, "blueprint": 30594, "ðŁēĸðŁēĸ": 30595, "harsh": 30596, "eros": 30597, "insomnia": 30598, "bankers": 30599, "taemin": 30600, "misconduct": 30601, "humber": 30602, "gidi": 30603, "eduardo": 30604, "cona": 30605, "muscular": 30606, "consuming": 30607, "rash": 30608, "donnie": 30609, "dipped": 30610, "collie": 30611, "samuel": 30612, "meltdown": 30613, "ðŁĺįðŁĺįðŁĺį": 30614, "mez": 30615, "examining": 30616, "schwartz": 30617, "pristine": 30618, "ðŁIJĿ": 30619, "veit": 30620, "fulfilling": 30621, "anesthe": 30622, "guesses": 30623, "draft": 30624, "somme": 30625, "solid": 30626, "pational": 30627, "hoped": 30628, "evolutionary": 30629, "aller": 30630, "entertained": 30631, "slips": 30632, 
"ludwig": 30633, "concludes": 30634, "sensible": 30635, "bonnet": 30636, "craze": 30637, "tras": 30638, "hazards": 30639, "constantine": 30640, "edics": 30641, "startrek": 30642, "toc": 30643, "occupational": 30644, "incheon": 30645, "deepikapadukone": 30646, "pizzas": 30647, "newcomer": 30648, "depart": 30649, "oppression": 30650, "ebony": 30651, "fossils": 30652, "trojan": 30653, "elen": 30654, "steaks": 30655, "khou": 30656, "positioning": 30657, "ugby": 30658, "redcross": 30659, "akh": 30660, "dolce": 30661, "usmnt": 30662, "ppen": 30663, "dilig": 30664, "mavs": 30665, "caller": 30666, "costello": 30667, "âĽĦ": 30668, "dyn": 30669, "things": 30670, "rhinos": 30671, "axi": 30672, "sarkar": 30673, "convocation": 30674, "atters": 30675, "ssss": 30676, "fungus": 30677, "eugen": 30678, "russo": 30679, "squat": 30680, "wsb": 30681, "elion": 30682, "williamsburg": 30683, "soff": 30684, "deficiency": 30685, "bearer": 30686, "okin": 30687, "keystone": 30688, "twain": 30689, "calming": 30690, "breakable": 30691, "wares": 30692, "horseracing": 30693, "combs": 30694, "bunting": 30695, "uit": 30696, "tland": 30697, "ðŁēĻðŁēĻðŁēĻ": 30698, "gastron": 30699, "sabot": 30700, "ickers": 30701, "commissioners": 30702, "senate": 30703, "iiot": 30704, "athena": 30705, "nitrogen": 30706, "antony": 30707, "erotic": 30708, "dialo": 30709, "missou": 30710, "hypocr": 30711, "âľĪ": 30712, "kaepernick": 30713, "canv": 30714, "droo": 30715, "cleveland": 30716, "osh": 30717, "monsta": 30718, "stefano": 30719, "^)": 30720, "shul": 30721, "poison": 30722, "hae": 30723, "commercials": 30724, "maul": 30725, "nitro": 30726, "coworker": 30727, "aloe": 30728, "vapor": 30729, "tents": 30730, "russian": 30731, "quid": 30732, "questionable": 30733, "midget": 30734, "poker": 30735, "girlfriends": 30736, "sinthe": 30737, "eritrea": 30738, "tenure": 30739, "deposits": 30740, "buckeyes": 30741, "spotter": 30742, "theodore": 30743, "trinity": 30744, "joaquin": 30745, "ucci": 30746, "followthe": 30747, 
"cafc": 30748, "mpa": 30749, "ðŁIJ»": 30750, "plotting": 30751, "domino": 30752, "taek": 30753, "sionally": 30754, "dicaprio": 30755, "pap": 30756, "carmel": 30757, "iger": 30758, "btcc": 30759, "bethle": 30760, "wwwbigbaldhead": 30761, "foodie": 30762, "baghdad": 30763, "masonry": 30764, "offended": 30765, "Ć Ā·": 30766, "Ć ĀøÄ£": 30767, "scro": 30768, "verses": 30769, "orient": 30770, "arches": 30771, "piyu": 30772, "knowyour": 30773, "gree": 30774, "takers": 30775, "guard": 30776, "dishon": 30777, "bucketlist": 30778, "bhafc": 30779, "wardly": 30780, "ðŁİīðŁİĬ": 30781, "leighton": 30782, "pew": 30783, "stray": 30784, "assaulted": 30785, "inhal": 30786, "lyfe": 30787, "amarketing": 30788, "lx": 30789, "katz": 30790, "ubuntu": 30791, "meo": 30792, "cartoonist": 30793, "turnover": 30794, "miz": 30795, "dislike": 30796, "mullen": 30797, "mof": 30798, "bland": 30799, "hides": 30800, "emerges": 30801, "chorizo": 30802, "trustee": 30803, "mahog": 30804, "lansing": 30805, "paralympic": 30806, "faint": 30807, "fauna": 30808, "chal": 30809, "snar": 30810, "cath": 30811, "benton": 30812, "castillo": 30813, "slippery": 30814, "apricot": 30815, "oecd": 30816, "baro": 30817, "lz": 30818, "heming": 30819, "clowns": 30820, "coworkers": 30821, "peruvian": 30822, "commuters": 30823, "yell": 30824, "ðŁļ“": 30825, "undering": 30826, "vj": 30827, "ttp": 30828, "flipk": 30829, "wana": 30830, "socent": 30831, "ĤâĸĤâĸ": 30832, "à¤Ĥ": 30833, "oosa": 30834, "jagger": 30835, "dism": 30836, "eless": 30837, "dham": 30838, "calif": 30839, "aofficial": 30840, "eclip": 30841, "harrogate": 30842, "grapp": 30843, "comrade": 30844, "ntr": 30845, "concentrate": 30846, "thighs": 30847, "bitcoin": 30848, "belarus": 30849, "ƫĵ": 30850, "enduring": 30851, "nowwatching": 30852, "industrial": 30853, "pip": 30854, "aron": 30855, "arat": 30856, "®": 30857, "whitby": 30858, "ooooooo": 30859, "saree": 30860, "ticals": 30861, "misleading": 30862, "yoon": 30863, "years": 30864, "sleigh": 30865, "romanian": 
30866, "scissors": 30867, "vampires": 30868, "acup": 30869, "abba": 30870, "thweeksary": 30871, "centri": 30872, "flye": 30873, "uo": 30874, "cbi": 30875, "buena": 30876, "sind": 30877, "marino": 30878, "burr": 30879, "rebuilding": 30880, "ल": 30881, "anniversaire": 30882, "acca": 30883, "ðŁēĢðŁēĢ": 30884, "getting": 30885, "tulips": 30886, "wolfpack": 30887, "âľįï¸ı": 30888, "morethan": 30889, "takin": 30890, "ð٤ĺðŁı»": 30891, "ube": 30892, "monic": 30893, "doubts": 30894, "mower": 30895, "cobalt": 30896, "donne": 30897, "speculation": 30898, "arguably": 30899, "kaku": 30900, "https": 30901, "prosecution": 30902, "dinah": 30903, "stamatic": 30904, "disclosed": 30905, "beverly": 30906, "flwx": 30907, "crabs": 30908, "extraordinaire": 30909, "warmest": 30910, "imperi": 30911, "ologists": 30912, "traces": 30913, "parc": 30914, "lakeside": 30915, "amr": 30916, "teri": 30917, "hourly": 30918, "domination": 30919, "arrow": 30920, "shrewsbury": 30921, "ancestry": 30922, "wrangler": 30923, "triggered": 30924, "pensac": 30925, "rooster": 30926, "survives": 30927, "aon": 30928, "boko": 30929, "valor": 30930, "loveis": 30931, "lag": 30932, "pey": 30933, "focal": 30934, "outlaws": 30935, "blanc": 30936, "articho": 30937, "wits": 30938, "marshall": 30939, "diego": 30940, "supportsmall": 30941, "uca": 30942, "sah": 30943, "jeet": 30944, "synago": 30945, "governing": 30946, "ðŁē¬": 30947, "salads": 30948, "create": 30949, "miriam": 30950, "censored": 30951, "amide": 30952, "nou": 30953, "zeta": 30954, "allegiance": 30955, "*)": 30956, "blm": 30957, "rican": 30958, "pastors": 30959, "olympus": 30960, "bloc": 30961, "whirl": 30962, "starry": 30963, "prone": 30964, "yk": 30965, "pne": 30966, "congratulating": 30967, "bev": 30968, "sober": 30969, "loveisland": 30970, "sair": 30971, "aning": 30972, "tutorials": 30973, "qe": 30974, "lund": 30975, "inist": 30976, "clever": 30977, "taxpayer": 30978, "aliz": 30979, "wrench": 30980, "ddling": 30981, "capri": 30982, "hpa": 30983, 
"ðŁı»âĢįâĻĤï¸ı": 30984, "naj": 30985, "oj": 30986, "futuristic": 30987, "jellyfish": 30988, "ðŁĶ„ðŁĶ„ðŁĶ„ðŁĶ„": 30989, "celery": 30990, "plank": 30991, "fila": 30992, "neme": 30993, "unhealthy": 30994, "lections": 30995, "ðŁ§”": 30996, "ritchie": 30997, "nws": 30998, "mikha": 30999, "wonderwoman": 31000, "âĢİ": 31001, "hipstamatic": 31002, "kag": 31003, "ðŁēľðŁēľðŁēľ": 31004, "poultry": 31005, "mow": 31006, "words": 31007, "loff": 31008, "ðŁ¤£ðŁ¤£": 31009, "relatable": 31010, "remixes": 31011, "kenyatta": 31012, "kem": 31013, "resigned": 31014, "fod": 31015, "straigh": 31016, "jlo": 31017, "hutch": 31018, "boxers": 31019, "colleen": 31020, "mags": 31021, "instructional": 31022, "kol": 31023, "attracts": 31024, "prag": 31025, "accountant": 31026, "goggles": 31027, "bru": 31028, "thole": 31029, "marrow": 31030, "leuke": 31031, "octo": 31032, "ponds": 31033, "bubbly": 31034, "heist": 31035, "ìĹij": 31036, "imp": 31037, "ahar": 31038, "haunt": 31039, "hallmark": 31040, "psych": 31041, "kkkkkkkk": 31042, "columb": 31043, "jumpsuit": 31044, "costco": 31045, "sidelines": 31046, "aggies": 31047, "overturned": 31048, "nib": 31049, "keychain": 31050, "fuk": 31051, "faf": 31052, "miam": 31053, "assistants": 31054, "cycled": 31055, "rider": 31056, "dammit": 31057, "redwings": 31058, "mages": 31059, "kins": 31060, "ƬĤ": 31061, "hod": 31062, "sont": 31063, "caroline": 31064, "\"'": 31065, "cule": 31066, "braid": 31067, "felony": 31068, "arities": 31069, "rutherford": 31070, "depiction": 31071, "isabelle": 31072, "roach": 31073, "kday": 31074, "fifthharmony": 31075, "emy": 31076, "ligam": 31077, "barista": 31078, "albuquerque": 31079, "gross": 31080, "ðŁįº": 31081, "ooks": 31082, "ðŁij¼": 31083, "duncan": 31084, "tryin": 31085, "jags": 31086, "gould": 31087, "litho": 31088, "âģ£": 31089, "аÐ": 31090, "sammy": 31091, "tung": 31092, "casser": 31093, "apolo": 31094, "aaaaa": 31095, "mang": 31096, "asics": 31097, "shen": 31098, "pye": 31099, "turbul": 31100, "ssp": 31101, "saintsfc": 
31102, "onlin": 31103, "nanny": 31104, "hester": 31105, "doz": 31106, "à¸Ķ": 31107, "thread": 31108, "rents": 31109, "khand": 31110, "ðŁēªðŁı½": 31111, "unconditional": 31112, "robson": 31113, "carre": 31114, "phon": 31115, "sacrificed": 31116, "£": 31117, "autos": 31118, "parker": 31119, "oca": 31120, "login": 31121, "keegan": 31122, "hardcover": 31123, "doughnuts": 31124, "ðŁĮİ": 31125, "spitfire": 31126, "refreshments": 31127, "saskatoon": 31128, "commodore": 31129, "jf": 31130, "rubber": 31131, "halamadrid": 31132, "childcare": 31133, "strada": 31134, "iom": 31135, "rik": 31136, "dakar": 31137, "thermom": 31138, "cropped": 31139, "garu": 31140, "alik": 31141, "veni": 31142, "ift": 31143, "sika": 31144, "rituals": 31145, "zul": 31146, "ech": 31147, "©": 31148, "sudan": 31149, "lland": 31150, "ime": 31151, "docker": 31152, "ì¤": 31153, "feared": 31154, "fao": 31155, "walter": 31156, "nog": 31157, "mutuals": 31158, "lh": 31159, "align": 31160, "monia": 31161, "conceptart": 31162, "ðŁĻıðŁı¼": 31163, "scoe": 31164, "competence": 31165, "swine": 31166, "lyme": 31167, "launch": 31168, "greener": 31169, "abstractart": 31170, "inquis": 31171, "granada": 31172, "gaelic": 31173, "fluff": 31174, "dbacks": 31175, "graveyard": 31176, "babe": 31177, "academic": 31178, "adventurous": 31179, "johann": 31180, "~!": 31181, "bibi": 31182, "|#": 31183, "plings": 31184, "getty": 31185, "asb": 31186, "âĿ¤ï¸ı@": 31187, "staff": 31188, "religions": 31189, "bangor": 31190, "worldbookday": 31191, "megh": 31192, "devin": 31193, "ashore": 31194, "meridian": 31195, "github": 31196, "quiz": 31197, "allstars": 31198, "bestest": 31199, "irresi": 31200, "acker": 31201, "dote": 31202, "warrington": 31203, "polly": 31204, "neworleans": 31205, "crou": 31206, "wigs": 31207, "chey": 31208, "smithsonian": 31209, "lasag": 31210, "detour": 31211, "boris": 31212, "straps": 31213, "mariah": 31214, "intentionally": 31215, "koh": 31216, "ðŁį¸": 31217, "ssian": 31218, "marissa": 31219, "coral": 31220, 
"episcopal": 31221, "casualty": 31222, "tomo": 31223, "supplychain": 31224, "samp": 31225, "ongo": 31226, "roo": 31227, "caviar": 31228, "pfw": 31229, "claudio": 31230, "buffalo": 31231, "sations": 31232, "matty": 31233, "snapback": 31234, "lds": 31235, "alarms": 31236, "matte": 31237, "âĺĶï¸ı": 31238, "conditioner": 31239, "dors": 31240, "hex": 31241, "fizz": 31242, "astri": 31243, "sussex": 31244, "security": 31245, "qaeda": 31246, "allstar": 31247, "cocacola": 31248, "asone": 31249, "clicks": 31250, "scans": 31251, "mute": 31252, "heavier": 31253, "ðŁİ§": 31254, "âĺŀ": 31255, "lvl": 31256, "bookboost": 31257, "youtube": 31258, "flashes": 31259, "fjor": 31260, "csu": 31261, "explode": 31262, "dodge": 31263, "cairn": 31264, "gonzales": 31265, "thill": 31266, "pelle": 31267, "hartley": 31268, "renewable": 31269, "retin": 31270, "estre": 31271, "costarica": 31272, "shipyard": 31273, "ncfc": 31274, "priya": 31275, "aghan": 31276, "anath": 31277, "plugin": 31278, "corey": 31279, "rebound": 31280, "oru": 31281, "katrin": 31282, "hormone": 31283, "gim": 31284, "mahindra": 31285, "ssus": 31286, "parkland": 31287, "harper": 31288, "fantastic": 31289, "inferno": 31290, "epilo": 31291, "wrestling": 31292, "fect": 31293, "cit": 31294, "acoun": 31295, "tossed": 31296, "monumental": 31297, "chartered": 31298, "bust": 31299, "petra": 31300, "âĮļ": 31301, "wildflowerhour": 31302, "sweaters": 31303, "*.": 31304, "bler": 31305, "atech": 31306, "gowan": 31307, "demographic": 31308, "bral": 31309, "suicide": 31310, "renovations": 31311, "vuel": 31312, "sinister": 31313, "armani": 31314, "misogy": 31315, "pharrell": 31316, "naps": 31317, "uniting": 31318, "crusaders": 31319, "corgi": 31320, "insured": 31321, "thani": 31322, "noor": 31323, "gq": 31324, "dada": 31325, "bicycles": 31326, "snuggle": 31327, "schan": 31328, "tenberg": 31329, "ssal": 31330, "femme": 31331, "boil": 31332, "½ï¸ı": 31333, "reap": 31334, "occurring": 31335, "hussein": 31336, "divid": 31337, "stoke": 31338, 
"shalom": 31339, "naia": 31340, "olic": 31341, "frustrating": 31342, "ƙĩ": 31343, "igs": 31344, "grover": 31345, "scenarios": 31346, "nds": 31347, "brutality": 31348, "medalli": 31349, "buon": 31350, "sass": 31351, "skateboarding": 31352, "onyx": 31353, "lorry": 31354, "nyu": 31355, "gautam": 31356, "mmings": 31357, "gug": 31358, "endi": 31359, "lothian": 31360, "commando": 31361, "chalk": 31362, "phora": 31363, "assessing": 31364, "tigh": 31365, "crunchy": 31366, "aday": 31367, "isl": 31368, "ciara": 31369, "pilgrims": 31370, "kamal": 31371, "pto": 31372, "britanni": 31373, "tani": 31374, "smc": 31375, "lure": 31376, "appstore": 31377, "aby": 31378, "golfing": 31379, "clc": 31380, "fau": 31381, "anas": 31382, "shutting": 31383, "regulated": 31384, "carnage": 31385, "scowboys": 31386, "allenge": 31387, "cma": 31388, "humboldt": 31389, "relle": 31390, "kumb": 31391, "heri": 31392, "refinery": 31393, "soundcheck": 31394, "dwayne": 31395, "bosnia": 31396, "isp": 31397, "thealth": 31398, "anniv": 31399, "relevance": 31400, "mya": 31401, "baggage": 31402, "dread": 31403, "sbc": 31404, "thed": 31405, "buh": 31406, "hijab": 31407, "loid": 31408, "kew": 31409, "cte": 31410, "respect": 31411, "lovelies": 31412, "cubes": 31413, "celebrate": 31414, "dirt": 31415, "savers": 31416, "_,": 31417, "garment": 31418, "pulitzer": 31419, "masjid": 31420, "beatport": 31421, "alarts": 31422, "encryption": 31423, "sner": 31424, "pleads": 31425, "foundry": 31426, "symmetry": 31427, "rumi": 31428, "birthplace": 31429, "scallops": 31430, "supple": 31431, "pivotal": 31432, "tati": 31433, "node": 31434, "sod": 31435, "proxim": 31436, "trics": 31437, "coldest": 31438, "brent": 31439, "mandu": 31440, "clair": 31441, "each": 31442, "andalu": 31443, "hiddleston": 31444, "ðŁIJº": 31445, "melts": 31446, "vance": 31447, "pinn": 31448, "sements": 31449, "screened": 31450, "sachs": 31451, "obl": 31452, "icha": 31453, "âĺĺï¸ı": 31454, "schoolers": 31455, "healed": 31456, "logged": 31457, "ð٤ĺðŁı¼": 
31458, "icus": 31459, "boredom": 31460, "bish": 31461, "bffs": 31462, "talking": 31463, "suresh": 31464, "hookem": 31465, "deon": 31466, "defl": 31467, "eileen": 31468, "ðŁįķ": 31469, "womenintech": 31470, "risotto": 31471, "ranger": 31472, "advertise": 31473, "à¸ģà¸": 31474, "telly": 31475, "lago": 31476, "dartmoor": 31477, "dong": 31478, "skates": 31479, "logo": 31480, "unner": 31481, "mailbox": 31482, "masala": 31483, "looooo": 31484, "amethyst": 31485, "chewing": 31486, "cbb": 31487, "australians": 31488, "rcmp": 31489, "gameart": 31490, "#...": 31491, "korn": 31492, "extremism": 31493, "fruitful": 31494, "ancient": 31495, "pubg": 31496, "polite": 31497, "whit": 31498, "murals": 31499, "mgr": 31500, "lineman": 31501, "davao": 31502, "stems": 31503, "tennis": 31504, "avage": 31505, "tupac": 31506, "gigantic": 31507, "hsbc": 31508, "autobiography": 31509, "upthe": 31510, "ีà¹Ī": 31511, "regal": 31512, "figuring": 31513, "kul": 31514, "missy": 31515, "hoop": 31516, "gras": 31517, "forums": 31518, "backlash": 31519, "abducted": 31520, "pnw": 31521, "minic": 31522, "butt": 31523, "bottoms": 31524, "aton": 31525, "veng": 31526, "ðŁĮı": 31527, "delaney": 31528, "prabhu": 31529, "fanclub": 31530, "overhaul": 31531, "healthye": 31532, "syno": 31533, "aaf": 31534, "renamed": 31535, "kimi": 31536, "uncle": 31537, "mancity": 31538, "seu": 31539, "quanti": 31540, "esteem": 31541, "umin": 31542, "enzo": 31543, "melvin": 31544, "undergo": 31545, "jhar": 31546, "farah": 31547, "coasters": 31548, "humphrey": 31549, "mhz": 31550, "childrens": 31551, "^.": 31552, "dhi": 31553, "disruptive": 31554, "integrating": 31555, "rnb": 31556, "oversized": 31557, "aide": 31558, "neau": 31559, "documentation": 31560, "ðŁijĢðŁijĢ": 31561, "palo": 31562, "hearth": 31563, "riyad": 31564, "punctu": 31565, "abcnews": 31566, "secures": 31567, "boyband": 31568, "birch": 31569, "juco": 31570, "traff": 31571, "legislators": 31572, "baya": 31573, "ãĤ¯": 31574, "noises": 31575, "collects": 31576, 
"swarm": 31577, "kner": 31578, "bishops": 31579, "sturgeon": 31580, "snapping": 31581, "mol": 31582, "freaky": 31583, "chairperson": 31584, "trop": 31585, "lynch": 31586, "carcin": 31587, "artsy": 31588, "esto": 31589, "chai": 31590, "flur": 31591, "invali": 31592, "sausages": 31593, "imel": 31594, "jor": 31595, "funfact": 31596, "witter": 31597, "punished": 31598, "acons": 31599, "hya": 31600, "reversi": 31601, "emc": 31602, "diffu": 31603, "zx": 31604, "spaw": 31605, "clad": 31606, "dmit": 31607, "holland": 31608, "fresco": 31609, "payroll": 31610, "abundant": 31611, "stuffing": 31612, "moro": 31613, "cny": 31614, "boycott": 31615, "wendy": 31616, "eleven": 31617, "provoc": 31618, "pilot": 31619, "trx": 31620, "bead": 31621, "climateaction": 31622, "rion": 31623, "assie": 31624, "ìĸ": 31625, "osm": 31626, "islamic": 31627, "hoar": 31628, "goodreads": 31629, "alici": 31630, "afternoons": 31631, "spokesman": 31632, "jolie": 31633, "itas": 31634, "mascara": 31635, "âĻ©âĻ«": 31636, "prevail": 31637, "beetroot": 31638, "lujah": 31639, "kli": 31640, "dodger": 31641, "»": 31642, "rule": 31643, "ln": 31644, "scream": 31645, "hobart": 31646, "colbert": 31647, "rtc": 31648, "erm": 31649, "patro": 31650, "quoting": 31651, "slive": 31652, "quest": 31653, "nonfiction": 31654, "seminary": 31655, "prosecutors": 31656, "vest": 31657, "expressway": 31658, "gge": 31659, "nautical": 31660, "etf": 31661, "ðŁİīðŁİĬ": 31662, "duration": 31663, "chaired": 31664, "thefilm": 31665, "fabio": 31666, "sheh": 31667, "cano": 31668, "ðŁēªðŁı»": 31669, "withdraw": 31670, "!:)": 31671, "corpus": 31672, "phenom": 31673, "yelp": 31674, "lawn": 31675, "entom": 31676, "snapper": 31677, "butte": 31678, "pinball": 31679, "proxy": 31680, "libre": 31681, "allevi": 31682, "nada": 31683, "gabriel": 31684, "fowl": 31685, "eureka": 31686, "daphne": 31687, "tunes": 31688, "punched": 31689, "whore": 31690, "jog": 31691, "rential": 31692, "manners": 31693, "ope": 31694, "whufc": 31695, "guth": 31696, "revolt": 
31697, "sneaker": 31698, "philharmonic": 31699, "hoste": 31700, "sovereignty": 31701, "ðŁĻıðŁĻıðŁĻı": 31702, "fishing": 31703, "sciart": 31704, "feta": 31705, "ipp": 31706, "dumping": 31707, "kelown": 31708, "giri": 31709, "digits": 31710, "salu": 31711, "sanjay": 31712, "tweeters": 31713, "spas": 31714, "colchester": 31715, "scab": 31716, "madd": 31717, "à¹Ħà¸": 31718, "Ƅĩ": 31719, "geddon": 31720, "marchfor": 31721, "dop": 31722, "maureen": 31723, "unplugged": 31724, "dido": 31725, "fashionblogger": 31726, "upa": 31727, "mexic": 31728, "tary": 31729, "polye": 31730, "jameson": 31731, "vt": 31732, "grinder": 31733, "maddy": 31734, "consultancy": 31735, "‘": 31736, "leagueoflegends": 31737, "accents": 31738, "umni": 31739, "janeiro": 31740, "tuss": 31741, "hens": 31742, "amplifier": 31743, "toshi": 31744, "prettier": 31745, "prevents": 31746, "newtown": 31747, "redwood": 31748, "vantage": 31749, "ballard": 31750, "artof": 31751, "ashe": 31752, "asion": 31753, "lacey": 31754, "apat": 31755, "grove": 31756, "à¸Ħ": 31757, "rwand": 31758, "realtors": 31759, "traitor": 31760, "bedding": 31761, "ör": 31762, "zion": 31763, "flashing": 31764, "campan": 31765, "boomer": 31766, "secretariat": 31767, "abol": 31768, "litigation": 31769, "contamination": 31770, "sedly": 31771, "shredded": 31772, "infor": 31773, "doherty": 31774, "benchmark": 31775, "roche": 31776, "skateboard": 31777, "shovel": 31778, "izz": 31779, "topper": 31780, "oster": 31781, "labyrin": 31782, "autum": 31783, "kong": 31784, "hummus": 31785, "viz": 31786, "technews": 31787, "klaus": 31788, "amusing": 31789, "socialmediamarketing": 31790, "ides": 31791, "castell": 31792, "stee": 31793, "underestimate": 31794, "calab": 31795, "paign": 31796, "billing": 31797, "unanimously": 31798, "gmb": 31799, "flyfishing": 31800, "hathaway": 31801, "commercial": 31802, "colouring": 31803, "skulls": 31804, "pivot": 31805, "tep": 31806, "tbc": 31807, "motorway": 31808, "xpress": 31809, "constructive": 31810, "puk": 31811, 
"underlying": 31812, "kirsten": 31813, "maniac": 31814, "chao": 31815, "sema": 31816, "chiffon": 31817, "ðŁijĮðŁı»": 31818, "verona": 31819, "komo": 31820, "standoff": 31821, "wiped": 31822, "cated": 31823, "blair": 31824, "workin": 31825, "msc": 31826, "bethlehem": 31827, "swipe": 31828, "unexpec": 31829, "pees": 31830, "petri": 31831, "origami": 31832, "ðŁijħ": 31833, "mexico": 31834, "flavor": 31835, "rudd": 31836, "cannabis": 31837, "maru": 31838, "riddle": 31839, "worshi": 31840, "silon": 31841, "schat": 31842, "apse": 31843, "tanger": 31844, "bious": 31845, "eer": 31846, "questioned": 31847, "ozar": 31848, "dank": 31849, "anglesey": 31850, "charan": 31851, "baku": 31852, "competen": 31853, "repri": 31854, "batter": 31855, "saxon": 31856, "calves": 31857, "lengths": 31858, "$$$": 31859, "âŀ”ï¸ı": 31860, "immersion": 31861, "gaunt": 31862, "carry": 31863, "cyto": 31864, "banda": 31865, "shutt": 31866, "experience": 31867, "elgin": 31868, "mousse": 31869, "taz": 31870, "êµ": 31871, "incorrect": 31872, "enz": 31873, "bham": 31874, "moron": 31875, "sover": 31876, "arun": 31877, "tipped": 31878, "lable": 31879, "dearly": 31880, "bautista": 31881, "Ć­Ä»": 31882, "mortal": 31883, "woop": 31884, "dtla": 31885, "shocks": 31886, "davos": 31887, "ðŁĵĿ": 31888, "swimwear": 31889, "herman": 31890, "ðŁijĩðŁijĩ": 31891, "zir": 31892, "neglected": 31893, "graced": 31894, "campuses": 31895, "avs": 31896, "arora": 31897, "swachhb": 31898, "livepd": 31899, "accra": 31900, "enquiries": 31901, "shooters": 31902, "kurt": 31903, "vancouver": 31904, "bradley": 31905, "garda": 31906, "gü": 31907, "olla": 31908, "attracting": 31909, "upton": 31910, "newin": 31911, "lumia": 31912, "furnace": 31913, "evers": 31914, "eon": 31915, "swa": 31916, "rookies": 31917, "aoc": 31918, "vss": 31919, "brisket": 31920, "torch": 31921, "yoda": 31922, "heartland": 31923, "taco": 31924, "phony": 31925, "foodbank": 31926, "abbey": 31927, "babylon": 31928, "uy": 31929, "greate": 31930, "expresses": 31931, 
"dandy": 31932, "scapes": 31933, "survivor": 31934, "rond": 31935, "eci": 31936, "havin": 31937, "abel": 31938, "childish": 31939, "torque": 31940, "wavy": 31941, "urself": 31942, "kanyewest": 31943, "yearof": 31944, "alestine": 31945, "obrien": 31946, "alfon": 31947, "skag": 31948, "korean": 31949, "anchorage": 31950, "valeri": 31951, "dew": 31952, "ðŁİ¨": 31953, "landslide": 31954, "carole": 31955, "christen": 31956, "gophers": 31957, "afi": 31958, "priyanka": 31959, "qq": 31960, "powerof": 31961, "itte": 31962, "pcso": 31963, "twol": 31964, "pry": 31965, "intellectu": 31966, "guerrero": 31967, "piles": 31968, "wishlist": 31969, "wren": 31970, "timetable": 31971, "ƫı": 31972, "prodigy": 31973, "gibbons": 31974, "./": 31975, "neur": 31976, "anzac": 31977, "murray": 31978, "viest": 31979, "plaster": 31980, "lair": 31981, "artgallery": 31982, "intercontinental": 31983, "gbr": 31984, "bellator": 31985, "namjoon": 31986, "mammals": 31987, "amel": 31988, "yaw": 31989, "sarasota": 31990, "camar": 31991, "budding": 31992, "summari": 31993, "acosta": 31994, "lash": 31995, "eyou": 31996, "postgraduate": 31997, "instructors": 31998, "tig": 31999, "constant": 32000, "werewolf": 32001, "icos": 32002, "clas": 32003, "glenn": 32004, "budge": 32005, "ðŁĻĤ": 32006, "erta": 32007, "stains": 32008, "persecution": 32009, "cumbri": 32010, "och": 32011, "synergy": 32012, "huang": 32013, "scandin": 32014, "midterms": 32015, "commentator": 32016, "regarded": 32017, "perpetual": 32018, "boiling": 32019, "alp": 32020, "lange": 32021, "schle": 32022, "faceli": 32023, "tweeta": 32024, "ridden": 32025, "oktoberfest": 32026, "charlottesville": 32027, "iklan": 32028, "jou": 32029, "chatham": 32030, "bsc": 32031, "ðŁį¦": 32032, "strauss": 32033, "mellow": 32034, "xxxx": 32035, "happyhour": 32036, "reactor": 32037, "wwer": 32038, "distraction": 32039, "atorial": 32040, "ðŁēªðŁı¼": 32041, "twinpeaks": 32042, "fayette": 32043, "aor": 32044, "kok": 32045, "broom": 32046, "syfy": 32047, "ouse": 
32048, "amag": 32049, "Ø·": 32050, "ubisoft": 32051, "lulu": 32052, "hallmark": 32053, "stuart": 32054, "itya": 32055, "sideline": 32056, "vengeance": 32057, "relu": 32058, "sexism": 32059, "bouncing": 32060, "unites": 32061, "gustav": 32062, "tessa": 32063, "stump": 32064, "proclamation": 32065, "imax": 32066, "dividend": 32067, "colby": 32068, "ðŁįİ": 32069, "playwright": 32070, "unsafe": 32071, "cosmo": 32072, "ðŁĩ²ðŁĩ½": 32073, "cupboard": 32074, "constituents": 32075, "anglia": 32076, "rampage": 32077, "ðŁĺįðŁĺįðŁĺįðŁĺįðŁĺį": 32078, "thanked": 32079, "takeaways": 32080, "shroff": 32081, "debat": 32082, "khur": 32083, "conducts": 32084, "formats": 32085, "Ć Ā©": 32086, "portage": 32087, "graphers": 32088, "uten": 32089, "prem": 32090, "moines": 32091, "condemns": 32092, "sous": 32093, "lps": 32094, "fcs": 32095, "dealership": 32096, "leukemia": 32097, "bureau": 32098, "skid": 32099, "guardiola": 32100, "caster": 32101, "third": 32102, "avoided": 32103, "encyclo": 32104, "csr": 32105, "vixx": 32106, "analyzing": 32107, "shear": 32108, "duluth": 32109, "shapiro": 32110, "chanting": 32111, "stresses": 32112, "asbe": 32113, "militia": 32114, "ãĄª": 32115, "collin": 32116, "arsene": 32117, "suresh": 32118, "teachings": 32119, "yixing": 32120, "shill": 32121, "nudes": 32122, "svu": 32123, "clearwater": 32124, "warped": 32125, "prolife": 32126, "artistson": 32127, "itu": 32128, "versailles": 32129, "galaxy": 32130, "axel": 32131, "springst": 32132, "cala": 32133, "huhu": 32134, "scu": 32135, "commitments": 32136, "exeter": 32137, "poignant": 32138, "motion": 32139, "conservatory": 32140, "rowdy": 32141, "recalled": 32142, "musk": 32143, "embelli": 32144, "sothe": 32145, "âĺĢ": 32146, "stopper": 32147, "schild": 32148, "tope": 32149, "elmo": 32150, "ziel": 32151, "jom": 32152, "barnsley": 32153, "snowden": 32154, "ontour": 32155, "journey": 32156, "hillsborough": 32157, "parole": 32158, "wts": 32159, "moving": 32160, "agility": 32161, "tivo": 32162, "ffers": 32163, 
"kindleunlimited": 32164, "gwen": 32165, "annan": 32166, "ahmad": 32167, "textured": 32168, "hepatitis": 32169, "dram": 32170, "insiders": 32171, "tissues": 32172, "ãĄĦ": 32173, "fcbarcelona": 32174, "cratic": 32175, "naacp": 32176, "pecan": 32177, "fgm": 32178, "customize": 32179, "concert": 32180, "gsm": 32181, "peg": 32182, "pone": 32183, "justintrudeau": 32184, "supercars": 32185, "happyholidays": 32186, "bular": 32187, "adox": 32188, "laptops": 32189, "digitalhealth": 32190, "destination": 32191, "gradually": 32192, "ÔĄ¦": 32193, "poppy": 32194, "ssl": 32195, "inhibit": 32196, "starlight": 32197, "offro": 32198, "gloomy": 32199, "xper": 32200, "halder": 32201, "implants": 32202, "leto": 32203, "hassel": 32204, "aas": 32205, "untold": 32206, "enci": 32207, "liberia": 32208, "oran": 32209, "contests": 32210, "ilah": 32211, "smag": 32212, "scout": 32213, "marianne": 32214, "cryo": 32215, "scheduling": 32216, "los": 32217, "kane": 32218, "stuttgart": 32219, "nese": 32220, "lawrence": 32221, "dain": 32222, "photom": 32223, "carou": 32224, "Ć ĀøĀ£": 32225, "gwy": 32226, "nationaldogday": 32227, "roasting": 32228, "bandcamp": 32229, "kentucky": 32230, "stretches": 32231, "kerel": 32232, "cashe": 32233, "ãĤ¸": 32234, "stax": 32235, "transi": 32236, "doggie": 32237, "atric": 32238, "halle": 32239, "civic": 32240, "browning": 32241, "leinster": 32242, "catday": 32243, "highland": 32244, "joyous": 32245, "incumb": 32246, "orlando": 32247, "romo": 32248, "colton": 32249, "delta": 32250, "carab": 32251, "rotc": 32252, "asteroid": 32253, "goosebumps": 32254, "mology": 32255, "yoko": 32256, "ands": 32257, "tomorrows": 32258, "redcarpet": 32259, "smp": 32260, "casio": 32261, "ðŁ¤£ðŁ¤£ðŁ¤£": 32262, "seau": 32263, "rejection": 32264, "rotating": 32265, "bipartisan": 32266, "thun": 32267, "mati": 32268, "boni": 32269, "oll": 32270, "energye": 32271, "doit": 32272, "lj": 32273, "motherhood": 32274, "louise": 32275, "necklaces": 32276, "elite": 32277, "nix": 32278, "lcs": 32279, 
"env": 32280, "glu": 32281, "lesh": 32282, "crank": 32283, "susie": 32284, "mclau": 32285, "sotu": 32286, "crowley": 32287, "ratri": 32288, "used": 32289, "breton": 32290, "alfredo": 32291, "yeo": 32292, "travelpics": 32293, "tipp": 32294, "ellison": 32295, "saxophone": 32296, "mered": 32297, "heughan": 32298, "taine": 32299, "fes": 32300, "viro": 32301, "supposedly": 32302, "ias": 32303, "digestive": 32304, "yle": 32305, "lizzy": 32306, "wildlifephotography": 32307, "brianna": 32308, "westfield": 32309, "rained": 32310, "amher": 32311, "ðŁĺĦðŁĺĦ": 32312, "distribute": 32313, "bottom": 32314, "preserving": 32315, "oiland": 32316, "crafty": 32317, "descen": 32318, "colling": 32319, "shakespearesunday": 32320, "rwc": 32321, "angled": 32322, "cian": 32323, "tations": 32324, "montage": 32325, "meyers": 32326, "francesca": 32327, "ðŁĮ·": 32328, "wiggins": 32329, "sanford": 32330, "volunteer": 32331, "carra": 32332, "bark": 32333, "varied": 32334, "plin": 32335, "amu": 32336, "kapil": 32337, "rockers": 32338, "quind": 32339, "brane": 32340, "inmate": 32341, "ental": 32342, "improvis": 32343, "michigan": 32344, "retweeting": 32345, "progressing": 32346, "mercedesbenz": 32347, "smoker": 32348, "physiology": 32349, "dorado": 32350, "wattpad": 32351, "hwa": 32352, "srbachchan": 32353, "wga": 32354, "volatility": 32355, "hire": 32356, "acap": 32357, "wnba": 32358, "heinz": 32359, "stitches": 32360, "kidnapping": 32361, "burys": 32362, "limb": 32363, "fitters": 32364, "thumbnail": 32365, "tone": 32366, "mirand": 32367, "desirable": 32368, "addison": 32369, "taran": 32370, "tamilnadu": 32371, "spectator": 32372, "sociology": 32373, "amitshah": 32374, "remotely": 32375, "âϦ": 32376, "hamid": 32377, "rds": 32378, "glee": 32379, "smoothly": 32380, "schro": 32381, "erc": 32382, "laliga": 32383, "heals": 32384, "usf": 32385, "nishi": 32386, "dhu": 32387, "unil": 32388, "hle": 32389, "tromb": 32390, "bhutan": 32391, "pilipinas": 32392, "seung": 32393, "whitman": 32394, "tey": 32395, 
"mince": 32396, "snowboarding": 32397, "reau": 32398, "kker": 32399, "avo": 32400, "zachary": 32401, "ranveer": 32402, "tik": 32403, "govern": 32404, "qual": 32405, "becky": 32406, "anthropology": 32407, "atten": 32408, "groceries": 32409, "debit": 32410, "warp": 32411, "silicon": 32412, "hawaii": 32413, "ðŁēħ": 32414, "pomegranate": 32415, "peer": 32416, "oranges": 32417, "peopleschoice": 32418, "endure": 32419, "ðŁēĽðŁēĽ": 32420, "ãĤ¹ãĄ": 32421, "acial": 32422, "ahaha": 32423, "stuk": 32424, "imperial": 32425, "blond": 32426, "powder": 32427, "knots": 32428, "vince": 32429, "woodlands": 32430, "dena": 32431, "watchin": 32432, "matcha": 32433, "mahat": 32434, "galaxies": 32435, "middlesbrough": 32436, "kö": 32437, "stree": 32438, "rescues": 32439, "waldo": 32440, "leroy": 32441, "despic": 32442, "realities": 32443, "tmnt": 32444, "haq": 32445, "uno": 32446, "pec": 32447, "bollywood": 32448, "blinds": 32449, "designthinking": 32450, "hems": 32451, "andhra": 32452, "absen": 32453, "fans": 32454, "stech": 32455, "shirehour": 32456, "blaine": 32457, "shakti": 32458, "purely": 32459, "ðŁıı": 32460, "trafal": 32461, "keynes": 32462, "grate": 32463, "tobias": 32464, "spontaneous": 32465, "saturated": 32466, "cavalry": 32467, "prisc": 32468, "ðŁĺij": 32469, "wht": 32470, "passi": 32471, "~~~": 32472, "virat": 32473, "pattinson": 32474, "lao": 32475, "weirdo": 32476, "sympathy": 32477, "juda": 32478, "occasionally": 32479, "credited": 32480, "statu": 32481, "esco": 32482, "hilly": 32483, "escape": 32484, "discharge": 32485, "seer": 32486, "maynard": 32487, "sudbury": 32488, "zlat": 32489, "oral": 32490, "weer": 32491, "encountered": 32492, "smelling": 32493, "oversight": 32494, "ĆŖĀø": 32495, "thatcher": 32496, "mackay": 32497, "youcan": 32498, "freep": 32499, "freedoms": 32500, "prophecy": 32501, "hoe": 32502, "ishqba": 32503, "drake": 32504, "quits": 32505, "pelled": 32506, "turk": 32507, "ovi": 32508, "wesleyan": 32509, "newmusic": 32510, "legg": 32511, "cheng": 32512, 
"hilli": 32513, "ayy": 32514, "panties": 32515, "adversity": 32516, "adjac": 32517, "vaccination": 32518, "juke": 32519, "gac": 32520, "exceed": 32521, "timesof": 32522, "staining": 32523, "epcot": 32524, "vital": 32525, "upward": 32526, "bethesda": 32527, "apark": 32528, "mahi": 32529, "campfire": 32530, "enchanting": 32531, "rhapso": 32532, "hz": 32533, "naver": 32534, "fax": 32535, "validation": 32536, "acad": 32537, "nyr": 32538, "asym": 32539, "coordinated": 32540, "departed": 32541, "allery": 32542, "varies": 32543, "sprite": 32544, "chaplin": 32545, "ssoccer": 32546, "swat": 32547, "bret": 32548, "reluct": 32549, "tunesapp": 32550, "superstar": 32551, "reminiscing": 32552, "oco": 32553, "homegrown": 32554, "doughnut": 32555, "uncanny": 32556, "lapd": 32557, "thyroid": 32558, "!âĿ¤ï¸ı": 32559, "botanic": 32560, "bres": 32561, "spade": 32562, "iste": 32563, "echoes": 32564, "dulil": 32565, "bursting": 32566, "quiero": 32567, "ðŁijİ": 32568, "loyola": 32569, "amusement": 32570, "hails": 32571, "sleepy": 32572, "burglary": 32573, "âľı": 32574, "rogue": 32575, "cotland": 32576, "moors": 32577, "lower": 32578, "wicked": 32579, "ðŁĶĬ": 32580, "competiti": 32581, "argentine": 32582, "yvonne": 32583, "kartikeyan": 32584, "iliary": 32585, "gatsby": 32586, "precinct": 32587, "sixty": 32588, "naji": 32589, "cams": 32590, "practitioner": 32591, "ðŁĺ³ðŁĺ³": 32592, "pune": 32593, "negli": 32594, "julien": 32595, "invaded": 32596, "calibr": 32597, "clam": 32598, "dubai": 32599, "muk": 32600, "lantic": 32601, "product": 32602, "fedex": 32603, "ï¸ı:": 32604, "eura": 32605, "darius": 32606, "sling": 32607, "virtualreality": 32608, "homestead": 32609, "ðŁı³ï¸ıâĢįðŁĮĪ": 32610, "paced": 32611, "inha": 32612, "pulmon": 32613, "lazy": 32614, "premiering": 32615, "mastered": 32616, "inhe": 32617, "congregation": 32618, "bajo": 32619, "sporting": 32620, "newjersey": 32621, "horny": 32622, "lmaoo": 32623, "lengthy": 32624, "dut": 32625, "yogh": 32626, "swearing": 32627, 
"philosophical": 32628, "papua": 32629, "inski": 32630, "knowles": 32631, "dyke": 32632, "â̲": 32633, "token": 32634, "mcguire": 32635, "riot": 32636, "probability": 32637, "mccon": 32638, "gros": 32639, "sumat": 32640, "cite": 32641, "daa": 32642, "onda": 32643, "maddow": 32644, "chew": 32645, "boardgames": 32646, "sparked": 32647, "reclaimed": 32648, "adhd": 32649, "nyse": 32650, "imwithher": 32651, "equinox": 32652, "booths": 32653, "balsamic": 32654, "hazy": 32655, "dorchester": 32656, "agos": 32657, "seaw": 32658, "moderator": 32659, "seriea": 32660, "andersen": 32661, "pilgrim": 32662, "âŃIJâŃIJ": 32663, "itchen": 32664, "halli": 32665, "xton": 32666, "nathaniel": 32667, "munition": 32668, "celestial": 32669, "gaf": 32670, "zoom": 32671, "markle": 32672, "penthouse": 32673, "cale": 32674, "sfa": 32675, "barking": 32676, "tucket": 32677, "emery": 32678, "calorie": 32679, "lique": 32680, "adar": 32681, "mcnam": 32682, "tortilla": 32683, "woodpecker": 32684, "motown": 32685, "badger": 32686, "ayrshire": 32687, "scramble": 32688, "dday": 32689, "craziest": 32690, "perrie": 32691, "choco": 32692, "caste": 32693, "iot": 32694, "wrecked": 32695, "selecting": 32696, "ussr": 32697, "graft": 32698, "punt": 32699, "labou": 32700, "irst": 32701, "baek": 32702, "ƛĮ": 32703, "suki": 32704, "queu": 32705, "achat": 32706, "tester": 32707, "augmented": 32708, "wcvb": 32709, "sinks": 32710, "ðŁĵ»": 32711, "rake": 32712, "interne": 32713, "because": 32714, "bellevue": 32715, "unearth": 32716, "lighten": 32717, "ðŁĺ£": 32718, "turnaround": 32719, "labeled": 32720, "unemployed": 32721, "twitterkurds": 32722, "leia": 32723, "hye": 32724, "greater": 32725, "ðŁIJİ": 32726, "timed": 32727, "ired": 32728, "ett": 32729, "limitations": 32730, "cabe": 32731, "sout": 32732, "beech": 32733, "annihil": 32734, "retrac": 32735, "yoona": 32736, "anger": 32737, "dennis": 32738, "supplying": 32739, "diz": 32740, "\"(": 32741, "scur": 32742, "gunman": 32743, "suho": 32744, "sauvignon": 32745, "Ć 
ĀøĀ„": 32746, "wiley": 32747, "landon": 32748, "choreography": 32749, "prehistoric": 32750, "ðŁıĄ": 32751, "vargas": 32752, "assessments": 32753, "pinnacle": 32754, "dii": 32755, "chamberlain": 32756, "ìĪ": 32757, "vp": 32758, "presenters": 32759, "deutsche": 32760, "sunshine": 32761, "salutes": 32762, "rone": 32763, "busiest": 32764, "-.-": 32765, "motorists": 32766, "hemisphere": 32767, "alwx": 32768, "psp": 32769, "owa": 32770, "denying": 32771, "choc": 32772, "gutier": 32773, "hanuk": 32774, "muskete": 32775, "jaitley": 32776, "sewage": 32777, "tame": 32778, "thinkers": 32779, "shim": 32780, "sequo": 32781, "papar": 32782, "middleeast": 32783, "kwa": 32784, "keg": 32785, "patagonia": 32786, "noy": 32787, "barça": 32788, "takeoff": 32789, "hea": 32790, "à¬": 32791, "nsc": 32792, "gdc": 32793, "ðŁijĪ": 32794, "moustache": 32795, "melania": 32796, "thra": 32797, "â¬Ĩï¸ı": 32798, "pierced": 32799, "zeus": 32800, "fonts": 32801, "bera": 32802, "itiner": 32803, "qatar": 32804, "contrary": 32805, "ireland": 32806, "ify": 32807, "oulos": 32808, "communal": 32809, "fins": 32810, "unpaid": 32811, "paa": 32812, "ðŁijĩðŁı»": 32813, "rios": 32814, "oup": 32815, "filler": 32816, "cafeteria": 32817, "à¸Ń": 32818, "kasi": 32819, "caliber": 32820, "zulu": 32821, "vsco": 32822, "tsford": 32823, "dragonfly": 32824, "smokin": 32825, "pist": 32826, "psychologist": 32827, "diplomat": 32828, "webs": 32829, "buccane": 32830, "ா": 32831, "motivational": 32832, "dune": 32833, "bae": 32834, "cfs": 32835, "without": 32836, "eron": 32837, "iac": 32838, "atee": 32839, "pension": 32840, "frazier": 32841, "ensis": 32842, "skis": 32843, "parting": 32844, "gery": 32845, "territories": 32846, "nachos": 32847, "enight": 32848, "everlasting": 32849, "msdhoni": 32850, "tele": 32851, "spun": 32852, "podi": 32853, "sabah": 32854, "environmentally": 32855, "cease": 32856, "beaumont": 32857, "marta": 32858, "kelvin": 32859, "hoff": 32860, "sunil": 32861, "nda": 32862, "cob": 32863, "shale": 32864, 
"reedus": 32865, "unboxing": 32866, "ubio": 32867, "reopened": 32868, "nall": 32869, "capsules": 32870, "marr": 32871, "himalayas": 32872, "sweeter": 32873, "jaz": 32874, "fmr": 32875, "tweeter": 32876, "dhaka": 32877, "nau": 32878, "demi": 32879, "dfs": 32880, "taurus": 32881, "fading": 32882, "itutes": 32883, "cip": 32884, "overflow": 32885, "jeffrey": 32886, "donny": 32887, "cartunesapp": 32888, "ðŁįij": 32889, "prefecture": 32890, "danced": 32891, "cpt": 32892, "pleasing": 32893, "italk": 32894, "earthquakes": 32895, "ulation": 32896, "hio": 32897, "ãĢĭ": 32898, "antan": 32899, "nutrient": 32900, "deere": 32901, "selects": 32902, "enrichment": 32903, "riti": 32904, "trampol": 32905, "blamed": 32906, "jia": 32907, "contributors": 32908, "chesapeake": 32909, "pigeons": 32910, "tribunal": 32911, "maduro": 32912, "wsu": 32913, "ilove": 32914, "efficiently": 32915, "darcy": 32916, "warms": 32917, "arra": 32918, "ecu": 32919, "hower": 32920, "struggled": 32921, "rajinikanth": 32922, "ðŁĺ¢ðŁĺ¢": 32923, "housing": 32924, "strat": 32925, "elix": 32926, "dispro": 32927, "raffic": 32928, "thierry": 32929, "nasty": 32930, "cfb": 32931, "staffing": 32932, "alma": 32933, "backers": 32934, "henson": 32935, "skywalker": 32936, "realestate": 32937, "roos": 32938, "nessy": 32939, "chance": 32940, "cairns": 32941, "cci": 32942, "pedal": 32943, "lyft": 32944, "crossword": 32945, "waiter": 32946, "onlyin": 32947, "kruger": 32948, "kir": 32949, "alejandro": 32950, "cartier": 32951, "carrera": 32952, "repaired": 32953, "ouat": 32954, "unclear": 32955, "unbreakable": 32956, "todayin": 32957, "queries": 32958, "jody": 32959, "genital": 32960, "winner": 32961, "tol": 32962, "kelowna": 32963, "fascinated": 32964, "ãĄ¬": 32965, "srisri": 32966, "squared": 32967, "sprung": 32968, "negotiate": 32969, "privately": 32970, "aven": 32971, ">>>>>": 32972, "gical": 32973, "gavin": 32974, "chesterfield": 32975, "zumba": 32976, "orr": 32977, "natalia": 32978, "impeachment": 32979, "mnl": 32980, 
"carat": 32981, "critique": 32982, "credible": 32983, "tracy": 32984, "tani": 32985, "musik": 32986, "jigsaw": 32987, "gambia": 32988, "tolkien": 32989, "feu": 32990, "asper": 32991, "savory": 32992, "foxx": 32993, "fitt": 32994, "marlon": 32995, "lrt": 32996, "vell": 32997, "pbr": 32998, "imprisoned": 32999, "iom": 33000, "chul": 33001, "windshield": 33002, "kaye": 33003, "baa": 33004, "chord": 33005, "sart": 33006, "algon": 33007, "ministerial": 33008, "natgeo": 33009, "lazio": 33010, "norms": 33011, "ðŁijįðŁijį": 33012, "licking": 33013, "futbol": 33014, "unsung": 33015, "dallascowboys": 33016, "shred": 33017, "disturb": 33018, "devine": 33019, "beards": 33020, "chf": 33021, "bday": 33022, "rosso": 33023, "igor": 33024, "ayi": 33025, "siren": 33026, "kair": 33027, "stiles": 33028, "rof": 33029, "magnets": 33030, "uncover": 33031, "mouse": 33032, "banging": 33033, "sighted": 33034, "speople": 33035, "impact": 33036, "rowland": 33037, "kira": 33038, "environment": 33039, "lovethe": 33040, "psis": 33041, "mishra": 33042, "glendale": 33043, "cajun": 33044, "oche": 33045, "deception": 33046, "sexist": 33047, "straws": 33048, "sga": 33049, "buffer": 33050, "apostle": 33051, "spl": 33052, "popup": 33053, "ðŁļĹ": 33054, "rg": 33055, "uper": 33056, "ballin": 33057, "idy": 33058, "occasional": 33059, "nationalpark": 33060, "ðŁıĬ": 33061, "uan": 33062, "innovation": 33063, "Ć ĀøĀ«": 33064, "teaparty": 33065, "rette": 33066, "counterfe": 33067, "bha": 33068, "recs": 33069, "igen": 33070, "ðŁĮIJ": 33071, "hummingbird": 33072, "cur": 33073, "haven": 33074, "lazar": 33075, "pueblo": 33076, "::": 33077, "zionist": 33078, "opath": 33079, "inverness": 33080, "promoter": 33081, "cartoon": 33082, "cabinets": 33083, "mahogany": 33084, "surveying": 33085, "rational": 33086, "feeling": 33087, "testify": 33088, "sow": 33089, "ocon": 33090, "Ć ĀøĀ¢": 33091, "neel": 33092, "maris": 33093, "solitary": 33094, "chemo": 33095, "radcliffe": 33096, "simons": 33097, "rosary": 33098, "newer": 
33099, "jodie": 33100, "retali": 33101, "prawn": 33102, "paddy": 33103, "henge": 33104, "kala": 33105, "implant": 33106, "aty": 33107, "brentwood": 33108, "paradox": 33109, "enez": 33110, "redesigned": 33111, "pour": 33112, "wyd": 33113, "alde": 33114, "Ć ĀÆÄ£": 33115, "sold": 33116, "biomedical": 33117, "à¹Ĥ": 33118, "tttt": 33119, "matteo": 33120, "yser": 33121, "newton": 33122, "debun": 33123, "nerdy": 33124, "lool": 33125, "woon": 33126, "elisabeth": 33127, "ecc": 33128, "whi": 33129, "acho": 33130, "salvage": 33131, "salaries": 33132, "quity": 33133, "navigating": 33134, "ophthal": 33135, "consoles": 33136, "rebuilt": 33137, "opec": 33138, "asters": 33139, "shored": 33140, "setlist": 33141, "kathryn": 33142, "rhymes": 33143, "revisiting": 33144, "ashish": 33145, "lift": 33146, "repost": 33147, "soleil": 33148, "âı±": 33149, "wealth": 33150, "saat": 33151, "wec": 33152, "kingjames": 33153, "flipkart": 33154, "fieldwork": 33155, "segu": 33156, "modal": 33157, "bub": 33158, "arers": 33159, "ðŁįē": 33160, "clooney": 33161, "paddington": 33162, "necessity": 33163, "guthrie": 33164, "pente": 33165, "limo": 33166, "josie": 33167, "artin": 33168, "enc": 33169, "lhs": 33170, "betrayal": 33171, "infographics": 33172, "ier": 33173, "moa": 33174, "hearings": 33175, "bonjour": 33176, "symbolic": 33177, "agro": 33178, "wedges": 33179, "kristina": 33180, "wildflower": 33181, "athletic": 33182, "photography": 33183, "pesh": 33184, "cahill": 33185, "chilean": 33186, "goul": 33187, "fioren": 33188, "ðŁij¶": 33189, "zil": 33190, "skim": 33191, "badoo": 33192, "delia": 33193, "treble": 33194, "ncc": 33195, "ðŁĩ¦ðŁĩ": 33196, "ahouse": 33197, "bullock": 33198, "solitude": 33199, "Ć˜Ā§Ć™ÄØ": 33200, "cancers": 33201, "futureofwork": 33202, "hutch": 33203, "watershed": 33204, "warmongers": 33205, "spilled": 33206, "colombo": 33207, "moth": 33208, "associations": 33209, "weighed": 33210, "globalgoals": 33211, "notjust": 33212, "christi": 33213, "torg": 33214, "sweating": 33215, 
"maneu": 33216, "clusters": 33217, "â̼ï¸ıâ̼ï¸ı": 33218, "taped": 33219, "uly": 33220, "trusting": 33221, "yusuf": 33222, "tein": 33223, "rab": 33224, ",,,,": 33225, "sinai": 33226, "audible": 33227, "explicit": 33228, "crowns": 33229, "schiz": 33230, "atleast": 33231, "ðŁĹ£": 33232, "debra": 33233, "jesuit": 33234, "enegger": 33235, "zhen": 33236, "onesie": 33237, "iit": 33238, "ssf": 33239, "gurgaon": 33240, "chakra": 33241, "bearcats": 33242, "kran": 33243, "kawa": 33244, "requesting": 33245, "hanover": 33246, "gend": 33247, "soros": 33248, "mercy": 33249, "lovely": 33250, "doomed": 33251, "timmy": 33252, "kuz": 33253, "ull": 33254, "abram": 33255, "saison": 33256, "ãĄ«": 33257, "cleaners": 33258, "remo": 33259, "circuits": 33260, "barred": 33261, "oth": 33262, "moist": 33263, "madeleine": 33264, "gallo": 33265, "uj": 33266, "permits": 33267, "heaviest": 33268, "carols": 33269, "azte": 33270, "giorgio": 33271, "floats": 33272, "declaring": 33273, "usrc": 33274, "minat": 33275, "crafts": 33276, "prima": 33277, "conveni": 33278, "nickelodeon": 33279, "dancing": 33280, "ceremonial": 33281, "blogg": 33282, "twp": 33283, "anglican": 33284, "shek": 33285, "knick": 33286, "(((": 33287, "hubbard": 33288, "harvey": 33289, "hitman": 33290, "feng": 33291, "wesome": 33292, "forza": 33293, "sword": 33294, "opus": 33295, "brom": 33296, "gibility": 33297, "zal": 33298, "munch": 33299, "dancehall": 33300, "greedy": 33301, "hdmi": 33302, "rebirth": 33303, "ðŁĺĭðŁĺĭ": 33304, "sworld": 33305, "figurine": 33306, "compost": 33307, "kf": 33308, "engraving": 33309, "giorno": 33310, "stana": 33311, "kman": 33312, "hamster": 33313, "composers": 33314, "aje": 33315, "functionality": 33316, "polk": 33317, "isons": 33318, "airplanes": 33319, "tese": 33320, "horrors": 33321, "muscat": 33322, "given": 33323, "spence": 33324, "ðŁĩ¸ðŁĩ": 33325, "eliot": 33326, "achilles": 33327, "freck": 33328, "cryptocurrencies": 33329, "souther": 33330, "halo": 33331, "borneo": 33332, "politic": 33333, 
"hahahahah": 33334, "upstate": 33335, "siena": 33336, "obscure": 33337, "hausen": 33338, "lloyd": 33339, "happyfriday": 33340, "motorbike": 33341, "bona": 33342, "americas": 33343, "hols": 33344, "-(": 33345, "sporty": 33346, "unaware": 33347, "revenues": 33348, "christopher": 33349, "banksy": 33350, "avan": 33351, "evapor": 33352, "compress": 33353, "eyeliner": 33354, "todos": 33355, "buffy": 33356, "renewableenergy": 33357, "lyrical": 33358, "archan": 33359, "rapist": 33360, "fairtrade": 33361, "lmaooo": 33362, "beatz": 33363, "proactive": 33364, "lapse": 33365, "irical": 33366, "reversal": 33367, "pode": 33368, "mcintyre": 33369, "macau": 33370, "ãĄķãĤ": 33371, "nashgrier": 33372, "fsa": 33373, "gall": 33374, "çĶŁ": 33375, "perpetr": 33376, "ilya": 33377, "configuration": 33378, "%;": 33379, "strange": 33380, "raci": 33381, "Ć ĀøÄ©": 33382, "pickups": 33383, "kovsky": 33384, "mammal": 33385, "wps": 33386, "gable": 33387, "comparative": 33388, "zh": 33389, "saveour": 33390, "davey": 33391, "onetsy": 33392, "mussels": 33393, "miser": 33394, "cristina": 33395, "electron": 33396, "crave": 33397, "loren": 33398, "precipitation": 33399, "mz": 33400, "ðŁį«": 33401, "vincen": 33402, "snowboard": 33403, "noida": 33404, "ahn": 33405, "marinated": 33406, "gtr": 33407, "townhall": 33408, "minis": 33409, "bethel": 33410, "advan": 33411, "sura": 33412, "shiel": 33413, "furry": 33414, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 33415, "lynd": 33416, "soil": 33417, "scence": 33418, "seneca": 33419, "sharjah": 33420, "dickens": 33421, "credentials": 33422, "avar": 33423, "perk": 33424, "requiring": 33425, "prefer": 33426, "jian": 33427, "deca": 33428, "rach": 33429, "ingfor": 33430, "dele": 33431, "beep": 33432, "ðŁē»": 33433, "cisely": 33434, "huddle": 33435, "greensboro": 33436, "hawking": 33437, "hoax": 33438, "hangar": 33439, "çľ": 33440, "miso": 33441, "lovin": 33442, "greta": 33443, "abad": 33444, "logie": 33445, "atan": 33446, "snowflake": 33447, "mahesh": 33448, "fearthe": 33449, 
"alkal": 33450, "bobblehead": 33451, "bahn": 33452, "judged": 33453, "futu": 33454, "felix": 33455, "ðŁįĵ": 33456, "pike": 33457, "deriv": 33458, "notices": 33459, "auer": 33460, "dissuper": 33461, "orda": 33462, "wipes": 33463, "amino": 33464, "strikers": 33465, "footb": 33466, "dramas": 33467, "punching": 33468, "scoreless": 33469, "hemingway": 33470, "bih": 33471, "ballad": 33472, "chatter": 33473, "ammo": 33474, "klein": 33475, "fabrication": 33476, "karim": 33477, "zend": 33478, "histo": 33479, "volta": 33480, "rocky": 33481, "marketer": 33482, "xtreme": 33483, "sequencing": 33484, "paradigm": 33485, "cleats": 33486, "booming": 33487, "âģłâģł": 33488, "blockade": 33489, "prompts": 33490, "yoghurt": 33491, "purpose": 33492, "nur": 33493, "regulate": 33494, "noisy": 33495, "ingrid": 33496, "birdwatching": 33497, "bartender": 33498, "ƙĄ": 33499, "wordof": 33500, "chaotic": 33501, "shorty": 33502, "eldest": 33503, "zapp": 33504, "onceuponatime": 33505, "flyo": 33506, "ritos": 33507, "mikequind": 33508, "ðŁIJ“": 33509, "registering": 33510, ".]": 33511, "adol": 33512, "gggg": 33513, "purge": 33514, "kidlit": 33515, "arbor": 33516, "valves": 33517, "synagogue": 33518, "oth": 33519, "unanimous": 33520, "verification": 33521, "darrell": 33522, "ãģĦ": 33523, "vanderbilt": 33524, "tapestry": 33525, "prosper": 33526, "diddy": 33527, "drafting": 33528, "decep": 33529, "marquis": 33530, "stint": 33531, "michaeljackson": 33532, "peeled": 33533, "menus": 33534, "bbb": 33535, "scare": 33536, "email": 33537, "wrigley": 33538, "itis": 33539, "fell": 33540, "somethin": 33541, "barra": 33542, "edgar": 33543, "dipping": 33544, "puddle": 33545, "slade": 33546, "learner": 33547, "jalen": 33548, "ð٧IJ": 33549, "thedaily": 33550, "mikequindazzi": 33551, "jux": 33552, "iqbal": 33553, "mckinney": 33554, "raiser": 33555, "efan": 33556, "drone": 33557, "cato": 33558, "picket": 33559, "crowe": 33560, "latt": 33561, "uko": 33562, "giuseppe": 33563, "hini": 33564, "synthesi": 33565, 
"pontifex": 33566, "songwriting": 33567, "tod": 33568, "switches": 33569, "dinners": 33570, "hq": 33571, "gabrielle": 33572, "pensacola": 33573, "circle": 33574, "exposes": 33575, "evs": 33576, "riyadh": 33577, "promen": 33578, "ock": 33579, "saj": 33580, "citation": 33581, "brewco": 33582, "josi": 33583, "epaper": 33584, "drif": 33585, "pointless": 33586, "tangled": 33587, "cripp": 33588, "lineups": 33589, "fairies": 33590, "daze": 33591, "mourn": 33592, "bladder": 33593, "salz": 33594, "burundi": 33595, "bookmark": 33596, "thepeople": 33597, "subsequ": 33598, "principal": 33599, "sker": 33600, "courtney": 33601, "aoki": 33602, "racers": 33603, "adm": 33604, "moma": 33605, "criticalrole": 33606, "houn": 33607, "shedding": 33608, "saka": 33609, "aceous": 33610, "mckay": 33611, "husbands": 33612, "½": 33613, "meda": 33614, "accusations": 33615, "rosel": 33616, "ncis": 33617, "witnessing": 33618, "orama": 33619, "gods": 33620, "hilton": 33621, "elman": 33622, "ÃŃn": 33623, "megap": 33624, "craven": 33625, "announcer": 33626, "criteri": 33627, "sheffieldissuper": 33628, "militant": 33629, "consul": 33630, "hooded": 33631, "abyss": 33632, "bx": 33633, "madam": 33634, "locu": 33635, "maryam": 33636, "manicure": 33637, "gratis": 33638, "actresses": 33639, "rosario": 33640, "thisdayin": 33641, "kingly": 33642, "gnome": 33643, "celine": 33644, "rous": 33645, "heel": 33646, "lilac": 33647, "vishal": 33648, "abh": 33649, "thorns": 33650, "sls": 33651, "neal": 33652, "constructing": 33653, "beren": 33654, "slang": 33655, "mains": 33656, "farra": 33657, "sarko": 33658, "paige": 33659, "guiller": 33660, "lala": 33661, "iceberg": 33662, "noun": 33663, "planners": 33664, "ummm": 33665, "ouses": 33666, "illary": 33667, "maan": 33668, "boxing": 33669, "zipper": 33670, "srinagar": 33671, "miguel": 33672, "ostr": 33673, "mpo": 33674, "responsibly": 33675, "lanterns": 33676, "appliance": 33677, "xb": 33678, "grenade": 33679, "neglect": 33680, "dysle": 33681, "hammock": 33682, 
"nectar": 33683, "witcher": 33684, "rgv": 33685, "dience": 33686, "serbian": 33687, "seeded": 33688, "cruz": 33689, "bish": 33690, "sphe": 33691, "eq": 33692, "skyrim": 33693, "algebra": 33694, "philately": 33695, "bungalow": 33696, "geoff": 33697, "yves": 33698, "demanded": 33699, "considerations": 33700, "thevamp": 33701, "pawankalyan": 33702, "coded": 33703, "gritty": 33704, "eruption": 33705, "seinfeld": 33706, "unidenti": 33707, "ëĭĪ": 33708, "worm": 33709, "acus": 33710, "seung": 33711, "dung": 33712, "roland": 33713, "sud": 33714, "divisions": 33715, "ablanc": 33716, "shortest": 33717, "jf": 33718, "poun": 33719, "plantbased": 33720, "beto": 33721, "tougher": 33722, "mco": 33723, "donet": 33724, "markus": 33725, "vfl": 33726, "ðŁıł": 33727, "opening": 33728, "coward": 33729, "cabernet": 33730, "oxi": 33731, "burlesque": 33732, "sandra": 33733, "sumo": 33734, "consist": 33735, "thot": 33736, "cayman": 33737, "motorola": 33738, "gutierrez": 33739, "dslr": 33740, "yw": 33741, "nobel": 33742, "novice": 33743, "momsdemand": 33744, "grunge": 33745, "spor": 33746, "dcc": 33747, "presses": 33748, "slist": 33749, "allotment": 33750, "vocational": 33751, "ftc": 33752, "puja": 33753, "loven": 33754, "uttarak": 33755, "tandem": 33756, "shep": 33757, "comedians": 33758, "anatom": 33759, "cantwait": 33760, "healthyeating": 33761, "westside": 33762, "margins": 33763, "chiang": 33764, "asbestos": 33765, "stupidity": 33766, "problematic": 33767, "fitbit": 33768, ":$": 33769, "ceilings": 33770, "shua": 33771, "protections": 33772, "biotic": 33773, "bengali": 33774, "rests": 33775, "biennale": 33776, "timo": 33777, "culmin": 33778, "eminent": 33779, "affection": 33780, "unbelievably": 33781, "individually": 33782, "canvassing": 33783, "whitt": 33784, "novasco": 33785, "chinson": 33786, "hpe": 33787, "gow": 33788, "gloucestershire": 33789, "pao": 33790, "threshold": 33791, "chevron": 33792, "sine": 33793, "wether": 33794, "ppie": 33795, "aquino": 33796, "antwerp": 33797, "âĸ¬": 
33798, "poon": 33799, "instaf": 33800, "equine": 33801, "cinematography": 33802, "nbafinals": 33803, "valiant": 33804, "kilkenny": 33805, "terence": 33806, "systemic": 33807, "srl": 33808, "pound": 33809, "madeira": 33810, "plough": 33811, "trecht": 33812, "mated": 33813, "mpd": 33814, "ransomware": 33815, "phin": 33816, "liqui": 33817, "bbce": 33818, "boomer": 33819, "istandwith": 33820, "conju": 33821, "rte": 33822, "nara": 33823, "foolish": 33824, "dashing": 33825, "viernes": 33826, "brite": 33827, "dau": 33828, "juniper": 33829, "aida": 33830, "younow": 33831, "razer": 33832, "dei": 33833, "repeating": 33834, "comforting": 33835, "adjacent": 33836, "eto": 33837, "casted": 33838, "chatur": 33839, "muer": 33840, "synth": 33841, "sanitary": 33842, "macle": 33843, "independent": 33844, "lawful": 33845, "eerie": 33846, "hor": 33847, "ðŁēŃ": 33848, "amrit": 33849, "velo": 33850, "stationery": 33851, "muf": 33852, "maymay": 33853, "contemplating": 33854, "elaborate": 33855, "gregor": 33856, "dries": 33857, "accol": 33858, "à¸ļ": 33859, "schwarzenegger": 33860, "illnesses": 33861, "daybreak": 33862, "followback": 33863, "collusion": 33864, "electronic": 33865, "jovi": 33866, "hiroshima": 33867, "taw": 33868, "homec": 33869, "micah": 33870, "quitting": 33871, "frosting": 33872, "benfica": 33873, "heli": 33874, "sical": 33875, "piccad": 33876, "corporate": 33877, "mentorship": 33878, "youare": 33879, "singer": 33880, "shiva": 33881, "rune": 33882, "inger": 33883, "rium": 33884, "playable": 33885, "doop": 33886, "willow": 33887, "terre": 33888, "nip": 33889, "atd": 33890, "warbler": 33891, "professionally": 33892, "erase": 33893, "proceed": 33894, "pedestrians": 33895, "mischief": 33896, "bending": 33897, "alaskan": 33898, "ckett": 33899, "mop": 33900, "ddles": 33901, "shutter": 33902, "geared": 33903, "ateneo": 33904, "madeline": 33905, "gations": 33906, "osha": 33907, "derick": 33908, "swild": 33909, "angry": 33910, "patents": 33911, "hunk": 33912, "decreased": 33913, 
"fry": 33914, "ðŁēĸðŁēĸðŁēĸ": 33915, "salon": 33916, "quantities": 33917, "dario": 33918, "nigel": 33919, "kuma": 33920, "jenn": 33921, "happye": 33922, "xxx": 33923, "rexperience": 33924, "pros": 33925, "ausch": 33926, "relessly": 33927, "hamburger": 33928, "fukushima": 33929, "erne": 33930, "statec": 33931, "rend": 33932, "mayfield": 33933, "jone": 33934, "lefty": 33935, "bernstein": 33936, "smil": 33937, "generates": 33938, "forestation": 33939, "bandits": 33940, "tayo": 33941, "rca": 33942, "acci": 33943, "rodrigo": 33944, "knapp": 33945, "elovers": 33946, "vegetation": 33947, "ural": 33948, "left": 33949, "ħï¸ı": 33950, "worldre": 33951, "suri": 33952, "embark": 33953, "wson": 33954, "bayou": 33955, "muller": 33956, "movers": 33957, "ðŁķº": 33958, "presbyter": 33959, "lf": 33960, "cree": 33961, "batb": 33962, "salam": 33963, "demonstrations": 33964, "anec": 33965, "npc": 33966, "itics": 33967, "tography": 33968, "reinst": 33969, "thurst": 33970, "tale": 33971, "offences": 33972, "smartcity": 33973, "brotha": 33974, "oftheyear": 33975, "invaluable": 33976, "earn": 33977, "ðŁijıðŁı½": 33978, "kremlin": 33979, "grady": 33980, "townfc": 33981, "guernsey": 33982, "maha": 33983, "contagious": 33984, "drex": 33985, "been": 33986, "(£": 33987, "nativity": 33988, "ktm": 33989, "somerhalder": 33990, "compounds": 33991, "Ć­Ä·Äŗ": 33992, "\"â̦": 33993, "afg": 33994, "ottnews": 33995, "hound": 33996, "firefly": 33997, "cilan": 33998, "donetsk": 33999, "volunteered": 34000, "akira": 34001, "ĆØĀŖ": 34002, "singul": 34003, "sth": 34004, "drowned": 34005, "mando": 34006, "heir": 34007, "ðŁİīðŁİĪ": 34008, "taxis": 34009, "yuki": 34010, "veld": 34011, "kans": 34012, "elk": 34013, "rants": 34014, "hashtag": 34015, "teng": 34016, "rog": 34017, "aat": 34018, "grub": 34019, "eber": 34020, "inindia": 34021, "colossus": 34022, "signi": 34023, "soever": 34024, "milestones": 34025, "dero": 34026, "differential": 34027, "phuket": 34028, "mastermind": 34029, "angh": 34030, "melani": 
34031, "broker": 34032, "actorvijay": 34033, "stunned": 34034, "continuity": 34035, "affl": 34036, "vocal": 34037, "perennial": 34038, "fiancé": 34039, "incomplete": 34040, "hunts": 34041, "reissue": 34042, "dominates": 34043, "turmeric": 34044, "roam": 34045, "rion": 34046, "bagged": 34047, "nassau": 34048, "fut": 34049, "xox": 34050, "nationaltrust": 34051, "joye": 34052, "sano": 34053, "hearthstone": 34054, "disrespect": 34055, "lees": 34056, "hse": 34057, "siberian": 34058, "offee": 34059, "restock": 34060, "wolfgang": 34061, "regan": 34062, "plano": 34063, "unwind": 34064, "repar": 34065, "mille": 34066, "],": 34067, "skull": 34068, "fatally": 34069, "conceptual": 34070, "ðŁĮ²": 34071, "fé": 34072, "berto": 34073, "bms": 34074, "ua": 34075, "magna": 34076, "notredame": 34077, "lete": 34078, "laundering": 34079, "heartwarming": 34080, "buffett": 34081, "goat": 34082, "peabo": 34083, "windmill": 34084, "vac": 34085, "continually": 34086, "azalea": 34087, "membrane": 34088, "cancels": 34089, "makeyourown": 34090, "athered": 34091, "pto": 34092, "torpe": 34093, "ðŁĺł": 34094, "ðŁē§": 34095, "scares": 34096, "leaking": 34097, "zet": 34098, "pixels": 34099, "aci": 34100, "khil": 34101, "marathi": 34102, "ðŁĻıðŁı½": 34103, "ula": 34104, "tamu": 34105, "chandigarh": 34106, "zagre": 34107, "aab": 34108, "pronounced": 34109, "aubrey": 34110, "sander": 34111, "punta": 34112, "harlow": 34113, "icelan": 34114, "celebratory": 34115, "sot": 34116, "unciation": 34117, "struly": 34118, "mcdowell": 34119, "deepika": 34120, "reminders": 34121, "mystical": 34122, "ctc": 34123, "chatted": 34124, "sica": 34125, "bargains": 34126, "chhat": 34127, "rubin": 34128, "mnet": 34129, "oilandgas": 34130, "pelican": 34131, "oat": 34132, "morality": 34133, "kour": 34134, "ih": 34135, "nuclear": 34136, "gcu": 34137, "richer": 34138, "venezia": 34139, "mma": 34140, "leith": 34141, "accompany": 34142, "richmond": 34143, "sportsnet": 34144, "baahu": 34145, "smuggling": 34146, "mmi": 34147, 
"ðŁĩ®ðŁĩª": 34148, "twists": 34149, "sahib": 34150, ".....": 34151, "ambitions": 34152, "illo": 34153, "historical": 34154, "forec": 34155, "showbiz": 34156, "ponies": 34157, "chasers": 34158, "remodel": 34159, "willing": 34160, "princesses": 34161, "ample": 34162, "cushions": 34163, "acles": 34164, "lotr": 34165, "dach": 34166, "anthe": 34167, "incorporate": 34168, "newbury": 34169, "kiri": 34170, "friedrich": 34171, "abv": 34172, "ballers": 34173, "albert": 34174, "ðŁijŃ": 34175, "leti": 34176, "nanop": 34177, "cide": 34178, "analo": 34179, "nsf": 34180, "))))": 34181, "griffiths": 34182, "valenci": 34183, "roano": 34184, "funrun": 34185, "babysitting": 34186, "caday": 34187, "entre": 34188, "uck": 34189, "slug": 34190, "tical": 34191, "thesims": 34192, "roar": 34193, "carney": 34194, "gam": 34195, "stowe": 34196, "fid": 34197, "bunny": 34198, "shamrock": 34199, "pecu": 34200, "molina": 34201, "gocougs": 34202, "contributes": 34203, "transformation": 34204, "moy": 34205, "vaj": 34206, "severy": 34207, "antioxidants": 34208, "thirteen": 34209, "sightseeing": 34210, "lj": 34211, "reversible": 34212, "oddly": 34213, "hookah": 34214, "nouvel": 34215, "halal": 34216, "fei": 34217, "stables": 34218, "mult": 34219, "hopped": 34220, "braids": 34221, "interchange": 34222, "ghanaian": 34223, "wwww": 34224, "ethno": 34225, "conjunction": 34226, "agov": 34227, "yeti": 34228, "earthand": 34229, "tsp": 34230, "conserve": 34231, "heirloom": 34232, "metaphor": 34233, "woof": 34234, "torio": 34235, "selfless": 34236, "nwa": 34237, "emilia": 34238, "ylene": 34239, "yxe": 34240, "giar": 34241, "moderating": 34242, "probz": 34243, "bfi": 34244, "neer": 34245, "dummy": 34246, "hanukkah": 34247, "webber": 34248, "kv": 34249, "eyebrow": 34250, "dagger": 34251, "sump": 34252, "rages": 34253, "orkney": 34254, "tbo": 34255, "halsey": 34256, "assignments": 34257, "tronic": 34258, "scrib": 34259, "coon": 34260, "anwar": 34261, "#âĢİ": 34262, "jalape": 34263, "florida": 34264, "quaid": 
34265, "hawkeyes": 34266, "âĻ”âĻ”": 34267, "streetcar": 34268, "rog": 34269, "datlantic": 34270, "granola": 34271, "unchanged": 34272, "expectation": 34273, "ƙĩ": 34274, "marlin": 34275, "gummy": 34276, "ðŁĻıðŁı¾": 34277, "awarenessmonth": 34278, "oilpainting": 34279, "muth": 34280, "perch": 34281, "junto": 34282, "villagers": 34283, "morg": 34284, "cheated": 34285, "webcomic": 34286, "thefuture": 34287, "dps": 34288, "lakings": 34289, "mentioning": 34290, "voor": 34291, "identities": 34292, "accord": 34293, "mcgu": 34294, "lpga": 34295, "rumour": 34296, "massively": 34297, "mpls": 34298, "healy": 34299, "date": 34300, "spoli": 34301, "revisited": 34302, "ont": 34303, "aland": 34304, "scrutiny": 34305, "lakeland": 34306, "blending": 34307, "": 34308, "ankara": 34309, "jamiedor": 34310, "metabolic": 34311, "fences": 34312, "anny": 34313, "Ƅħ": 34314, "semicon": 34315, "oott": 34316, "spaceship": 34317, "wacky": 34318, "leta": 34319, "apac": 34320, "shee": 34321, "inherit": 34322, "dores": 34323, "ðŁĩ¨ðŁĩ¦": 34324, "gente": 34325, "twick": 34326, "rims": 34327, "galve": 34328, "deville": 34329, "kingfisher": 34330, "scorpio": 34331, "owl": 34332, "alar": 34333, "varian": 34334, "ðŁĹĵ": 34335, "venetian": 34336, "stardust": 34337, "thenorth": 34338, "qing": 34339, "harrington": 34340, "consulate": 34341, "spectacle": 34342, "hobbs": 34343, "turks": 34344, "greer": 34345, "mating": 34346, "ðŁİĢ": 34347, "ðŁĮĢ": 34348, "directs": 34349, "Ć­Ä­": 34350, "pompeo": 34351, "voiced": 34352, "laos": 34353, "tzu": 34354, "prome": 34355, "prism": 34356, "merc": 34357, "fortunately": 34358, "bcfc": 34359, "mcdonnell": 34360, "notsorry": 34361, "smiled": 34362, "tba": 34363, "forwar": 34364, "midterm": 34365, "darby": 34366, "weinstein": 34367, "upgrading": 34368, "wolff": 34369, "bronco": 34370, "cabello": 34371, "ðŁ„ĩ": 34372, "fiable": 34373, "sharpe": 34374, "battered": 34375, "sato": 34376, "mythical": 34377, "instapic": 34378, "prepped": 34379, "enium": 34380, "espo": 34381, 
"diaper": 34382, "explanations": 34383, "whopping": 34384, "ragnar": 34385, "peel": 34386, "antibiotic": 34387, "lacks": 34388, "harrison": 34389, "lism": 34390, "aul": 34391, "quail": 34392, "martina": 34393, "sentencing": 34394, "scams": 34395, "didi": 34396, "tronics": 34397, "ãħłãħł": 34398, "goff": 34399, "zain": 34400, "paramore": 34401, "chained": 34402, "clinton": 34403, "liff": 34404, "cottages": 34405, "emon": 34406, "reverend": 34407, "consumer": 34408, "cean": 34409, "tany": 34410, "lumpur": 34411, "ebay": 34412, "stool": 34413, "ðŁĺ»ðŁĺ»": 34414, "tapro": 34415, "hath": 34416, "modernart": 34417, "justine": 34418, "proverb": 34419, "appy": 34420, "trax": 34421, "manifest": 34422, "ambu": 34423, "naik": 34424, "pepp": 34425, "rsd": 34426, "merchants": 34427, "kitchener": 34428, "shifted": 34429, "lizz": 34430, "âĺħâĺħâĺħâĺħ": 34431, "âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ": 34432, "utopia": 34433, "tomo": 34434, "outed": 34435, "comers": 34436, "chiropractic": 34437, "bookclub": 34438, "cindy": 34439, "prohibition": 34440, "seuss": 34441, "민": 34442, "thinkin": 34443, "rrrr": 34444, "gofund": 34445, "tack": 34446, "omb": 34447, "catastrophic": 34448, "lingu": 34449, "guildford": 34450, "botd": 34451, "à„ĭ": 34452, "planter": 34453, "^^": 34454, "wink": 34455, "kathmandu": 34456, "stoppers": 34457, "smoothies": 34458, "reefs": 34459, "hind": 34460, "bellamy": 34461, "Ħƫ": 34462, "wastewater": 34463, "voor": 34464, "natl": 34465, "!]": 34466, "reel": 34467, "yap": 34468, "scooby": 34469, "workspace": 34470, "corinthians": 34471, "blun": 34472, "obligation": 34473, "gbbo": 34474, "dyson": 34475, "cravings": 34476, "ellington": 34477, "dapl": 34478, "wrexham": 34479, "earthandclouds": 34480, "ukrunchat": 34481, "positioned": 34482, "kalb": 34483, "foursquare": 34484, "jock": 34485, "impending": 34486, "evening": 34487, "athy": 34488, "proclaimed": 34489, "cites": 34490, "annapolis": 34491, "sani": 34492, "marth": 34493, "irl": 34494, "accommo": 34495, "kaa": 34496, 
"fina": 34497, "yaa": 34498, "disper": 34499, "ecar": 34500, "bhak": 34501, "willy": 34502, "ðŁĺĢðŁĺĢ": 34503, "mcdermott": 34504, "moj": 34505, "generational": 34506, "usaid": 34507, "training": 34508, "lonely": 34509, "lores": 34510, "impecc": 34511, "âĢIJ": 34512, "beavers": 34513, "maki": 34514, "heb": 34515, "aapl": 34516, "Ƅı": 34517, "wolverhampton": 34518, "leaderboard": 34519, "meu": 34520, "cfa": 34521, "eastern": 34522, "hur": 34523, "civilwar": 34524, "ourage": 34525, "horned": 34526, "lehigh": 34527, "awards": 34528, "evident": 34529, "gigab": 34530, "rous": 34531, "madel": 34532, "robyn": 34533, "urgently": 34534, "kors": 34535, "enas": 34536, "heisman": 34537, "bambam": 34538, "fabian": 34539, "fom": 34540, "evaluating": 34541, "assembly": 34542, "outsourcing": 34543, "huntsville": 34544, "ðŁĶª": 34545, "justified": 34546, "cashier": 34547, "spaper": 34548, "buckeye": 34549, "analytical": 34550, "illuminati": 34551, "autho": 34552, "oj": 34553, "shade": 34554, "geelong": 34555, "whey": 34556, "heaton": 34557, "terribly": 34558, "elek": 34559, "uncharted": 34560, "sdlive": 34561, "motocross": 34562, "hermes": 34563, "darshan": 34564, "darlington": 34565, "cashmere": 34566, "gripping": 34567, "cilantro": 34568, "punish": 34569, "...:": 34570, "ðŁēĦ": 34571, "instance": 34572, "deri": 34573, "lobal": 34574, "mukher": 34575, "spar": 34576, "thinker": 34577, "fremont": 34578, "compiled": 34579, "colorado": 34580, "vigne": 34581, "smd": 34582, "whead": 34583, "village": 34584, "leek": 34585, "formulae": 34586, "tares": 34587, "persistence": 34588, "??????": 34589, "pedago": 34590, "hez": 34591, "alzheimers": 34592, "vulture": 34593, "offence": 34594, "isgreat": 34595, "suffra": 34596, "kickin": 34597, "hmmmm": 34598, "broadway": 34599, "ï¸ı@": 34600, "arti": 34601, "allison": 34602, "endorses": 34603, "ryu": 34604, "lollipop": 34605, "soybean": 34606, "kendall": 34607, "cera": 34608, "invade": 34609, "(ðŁĵ·:": 34610, "converter": 34611, "carpets": 34612, 
"hobo": 34613, "frit": 34614, "peac": 34615, "esqu": 34616, "ernan": 34617, "ouf": 34618, "anil": 34619, "differ": 34620, "ching": 34621, "brecht": 34622, "spg": 34623, "davenport": 34624, "strava": 34625, "severn": 34626, "ngos": 34627, "storians": 34628, "fete": 34629, "paramedic": 34630, "jhb": 34631, "alamo": 34632, "sneaking": 34633, "goldcoast": 34634, "roofs": 34635, "isil": 34636, "depicted": 34637, "projections": 34638, "numb": 34639, "oss": 34640, "epi": 34641, "glucose": 34642, "zidane": 34643, "infiniti": 34644, "íĺĦ": 34645, "ransom": 34646, "tonics": 34647, "falk": 34648, "gler": 34649, "outw": 34650, "ress": 34651, "weekly": 34652, "theon": 34653, "nole": 34654, "ðŁĩªðŁĩº": 34655, "volley": 34656, "summar": 34657, "negativity": 34658, "samson": 34659, "yew": 34660, "ausvotes": 34661, "jul": 34662, "judy": 34663, "fart": 34664, "prayed": 34665, "palate": 34666, "multicultural": 34667, "doubleheader": 34668, "cyclones": 34669, "pierre": 34670, "ãģ¨": 34671, "âĺłï¸ı": 34672, "rtw": 34673, "converting": 34674, "wirral": 34675, "lari": 34676, "irrelevant": 34677, "austinmahone": 34678, "anche": 34679, "yaan": 34680, "sdf": 34681, "$.": 34682, "exploding": 34683, "ultimate": 34684, "profici": 34685, "gofundme": 34686, "cellence": 34687, "epstein": 34688, "bullied": 34689, "septic": 34690, "த": 34691, "lumber": 34692, "cuff": 34693, "vscocam": 34694, "plor": 34695, "Ć ĀøĀ„": 34696, "seok": 34697, "roto": 34698, "venezuelan": 34699, "sorta": 34700, "spirited": 34701, "danielpadilla": 34702, "teamsisd": 34703, "radioactive": 34704, "icelandic": 34705, "ðŁē¤": 34706, "vere": 34707, "accommodate": 34708, "shipp": 34709, "otter": 34710, "olina": 34711, "ego": 34712, "sula": 34713, "sanantonio": 34714, "deas": 34715, "similarities": 34716, "âļ¾": 34717, "yom": 34718, "broward": 34719, "İ": 34720, "cancun": 34721, "verify": 34722, "onte": 34723, "candlelight": 34724, "ìłķ": 34725, "infants": 34726, "azam": 34727, "ðŁĺ°": 34728, "leven": 34729, "unstable": 34730, 
"bloomington": 34731, "xford": 34732, "contour": 34733, "yp": 34734, "innovator": 34735, "histories": 34736, "poy": 34737, "lololol": 34738, "expires": 34739, "catalo": 34740, "billboards": 34741, "anab": 34742, "elic": 34743, "novascotia": 34744, "faire": 34745, "ìĿ“": 34746, "rockwell": 34747, "grille": 34748, "aztec": 34749, "johor": 34750, "urstruly": 34751, "firen": 34752, "dunlop": 34753, "idle": 34754, "portman": 34755, "joes": 34756, "txhsfb": 34757, "holm": 34758, "chamele": 34759, "underworld": 34760, "loss": 34761, "tiem": 34762, "therapists": 34763, "pasture": 34764, "paste": 34765, "ingnow": 34766, "vulcan": 34767, "ragon": 34768, "larkin": 34769, "oshi": 34770, "hoco": 34771, "childhood": 34772, "umbrel": 34773, "successor": 34774, "kathy": 34775, "izen": 34776, "°ï¸ı": 34777, "shareholders": 34778, "olga": 34779, "aib": 34780, "heap": 34781, "flaming": 34782, "rou": 34783, "airtel": 34784, "ratt": 34785, "zane": 34786, "vow": 34787, "thorough": 34788, "snag": 34789, "parth": 34790, "unconscious": 34791, "vey": 34792, "newrelease": 34793, "ghee": 34794, "croatian": 34795, "facilitating": 34796, "swanson": 34797, "astoria": 34798, "tology": 34799, "mastery": 34800, "ð٤ij": 34801, "bilbao": 34802, "troupe": 34803, "theori": 34804, "cheyenne": 34805, "rott": 34806, "shoreline": 34807, "grasso": 34808, "masterchef": 34809, "+)": 34810, "vix": 34811, "ellenshow": 34812, "asg": 34813, "anak": 34814, "kuya": 34815, "safarilive": 34816, "debuting": 34817, "blum": 34818, "listener": 34819, "vins": 34820, "bookshelf": 34821, "smartcities": 34822, "makeyourownlane": 34823, ";;": 34824, "ðŁIJ¯": 34825, "rizz": 34826, "onward": 34827, "bulldog": 34828, "bearish": 34829, "viruses": 34830, "frigh": 34831, "linden": 34832, "weiser": 34833, "snt": 34834, "gona": 34835, "dresden": 34836, "flanders": 34837, "cuk": 34838, "wheeling": 34839, "bau": 34840, "atuesday": 34841, "surfers": 34842, "swift": 34843, "mccall": 34844, "arbitration": 34845, "awd": 34846, "monc": 
34847, "bine": 34848, "atx": 34849, "refr": 34850, "miro": 34851, "posey": 34852, "nare": 34853, "ritter": 34854, "âģ¦": 34855, "playbook": 34856, "blowout": 34857, "sportsmanship": 34858, "soooooo": 34859, "malayalam": 34860, "grims": 34861, "burbank": 34862, "infinity": 34863, "sargent": 34864, "oitnb": 34865, "josephine": 34866, "skipping": 34867, "parkin": 34868, "excursion": 34869, "seminars": 34870, "johar": 34871, "partridge": 34872, "postgame": 34873, "llll": 34874, "blanche": 34875, "tempting": 34876, "mna": 34877, "luka": 34878, "isers": 34879, "toffee": 34880, "barron": 34881, "hemmings": 34882, "sae": 34883, "gohawks": 34884, "cupid": 34885, "limbs": 34886, "conse": 34887, "uncommon": 34888, "zada": 34889, "headshot": 34890, "soils": 34891, "pioneer": 34892, "mamma": 34893, "semitic": 34894, "pandey": 34895, "jamiedornan": 34896, "splits": 34897, "vela": 34898, "soni": 34899, "raff": 34900, "tmobile": 34901, "âŀĸ": 34902, "prawns": 34903, "liter": 34904, "enjoyment": 34905, "eggplant": 34906, "tub": 34907, "cultural": 34908, "usic": 34909, "suspicion": 34910, "sycam": 34911, "summed": 34912, "madu": 34913, "hock": 34914, "upwards": 34915, "eyeing": 34916, "rive": 34917, "assassins": 34918, "âĤ¬": 34919, "outfy": 34920, "chives": 34921, "tner": 34922, "lais": 34923, "porridge": 34924, "saddest": 34925, "wcc": 34926, "vicki": 34927, "snails": 34928, "bizitalk": 34929, "millan": 34930, "ðŁĮį": 34931, "samoa": 34932, "jing": 34933, "mikey": 34934, "guj": 34935, "chelms": 34936, "eligibility": 34937, "armada": 34938, "throp": 34939, "surgeries": 34940, "ãĤ¿": 34941, "mohawk": 34942, "exits": 34943, "mem": 34944, "islington": 34945, "cme": 34946, "landfill": 34947, "kaitlyn": 34948, "ðŁİ¼": 34949, "combinations": 34950, "tomorrowland": 34951, "verb": 34952, "cora": 34953, "precisely": 34954, "naom": 34955, "ðŁĨķ": 34956, "shrink": 34957, "softly": 34958, "mercede": 34959, "mandel": 34960, "poodle": 34961, "ballerina": 34962, "soph": 34963, "juxta": 34964, 
"yat": 34965, "aryan": 34966, "hesitate": 34967, "lowered": 34968, "gular": 34969, "dungeonsand": 34970, "ronan": 34971, "myri": 34972, "spf": 34973, "menopau": 34974, "grasp": 34975, "pathi": 34976, "feasi": 34977, "flaw": 34978, "shistory": 34979, "steward": 34980, "ggle": 34981, "fayre": 34982, "clique": 34983, "credibility": 34984, "yog": 34985, "section": 34986, "musko": 34987, "seville": 34988, "nott": 34989, "calm": 34990, "mateo": 34991, "indicted": 34992, "fiba": 34993, "byl": 34994, "lino": 34995, "ukin": 34996, "!!#": 34997, "enigma": 34998, "sirius": 34999, "busc": 35000, "ðŁįĬ": 35001, "mackerel": 35002, "psalms": 35003, "aat": 35004, "tomorrowspaper": 35005, "ðŁĺĸ": 35006, "pfc": 35007, "...........": 35008, "shrek": 35009, "mullet": 35010, "osh": 35011, "dangerously": 35012, "immensely": 35013, "amur": 35014, "ðŁįĤ": 35015, "propor": 35016, "sya": 35017, "londonmarathon": 35018, "above": 35019, "obligatory": 35020, "prov": 35021, "racha": 35022, "alexis": 35023, "primary": 35024, "shh": 35025, "ethernet": 35026, "dstv": 35027, "cougar": 35028, "unlucky": 35029, "nil": 35030, "steakhouse": 35031, "mela": 35032, "fcbayern": 35033, "causeway": 35034, "catherine": 35035, "fluorescent": 35036, "nxt": 35037, "tokyo": 35038, "ausp": 35039, "relegation": 35040, "quizz": 35041, "shoreditch": 35042, "proudtobe": 35043, "promos": 35044, "interacting": 35045, "homebrew": 35046, "daesh": 35047, "wpg": 35048, "steadily": 35049, "provinces": 35050, "ballots": 35051, "iah": 35052, "alto": 35053, "<<<": 35054, "youu": 35055, "riley": 35056, "preference": 35057, "traverse": 35058, "incense": 35059, "ammunition": 35060, "hodges": 35061, "#@": 35062, "hailstate": 35063, "tartan": 35064, "witchcraft": 35065, "ventilation": 35066, "libertarian": 35067, "!â̦": 35068, "owes": 35069, "%!": 35070, "ongchang": 35071, "brushing": 35072, "leic": 35073, "fiber": 35074, "underattack": 35075, "download": 35076, "expir": 35077, "hyo": 35078, "pompey": 35079, "mcbride": 35080, "yag": 
35081, "stree": 35082, "combat": 35083, "tending": 35084, "aira": 35085, "guggen": 35086, "abra": 35087, "inna": 35088, "flips": 35089, "awal": 35090, "mach": 35091, "dollar": 35092, "inspirations": 35093, "zum": 35094, "odu": 35095, "itty": 35096, "videogame": 35097, "aquaman": 35098, "haru": 35099, "belfast": 35100, "jeb": 35101, "butch": 35102, "usgs": 35103, "calculus": 35104, "goyal": 35105, "morgen": 35106, "xfinity": 35107, "standup": 35108, "contracep": 35109, "sabre": 35110, "nabe": 35111, "insecure": 35112, "generously": 35113, "epitome": 35114, "lw": 35115, "tca": 35116, "narratives": 35117, "donnell": 35118, "pandas": 35119, "bergh": 35120, "tut": 35121, "keral": 35122, "felicity": 35123, "brampton": 35124, "quintet": 35125, "nomore": 35126, "ðŁĶij": 35127, "loi": 35128, "alhamdulil": 35129, "ðŁĶ„ðŁĶĹ": 35130, "stoner": 35131, "shawl": 35132, "clinical": 35133, "brendan": 35134, "gone": 35135, "flawed": 35136, "trippy": 35137, "jg": 35138, "allocation": 35139, "poaching": 35140, "vevo": 35141, "mocks": 35142, "leftist": 35143, "bonuses": 35144, "condemned": 35145, "ability": 35146, "stating": 35147, "microbiome": 35148, "biologist": 35149, "foryou": 35150, "wahlberg": 35151, "ssor": 35152, "iftar": 35153, "wul": 35154, "ÑĦоÑĤ": 35155, "pomer": 35156, "meme": 35157, "verte": 35158, "trell": 35159, "trait": 35160, "inlet": 35161, "hormones": 35162, "deliberately": 35163, "villar": 35164, "battleship": 35165, "pbl": 35166, "twenti": 35167, "hokies": 35168, "dalail": 35169, "saya": 35170, "mayfair": 35171, "hans": 35172, "diets": 35173, "⾨⾨": 35174, "odin": 35175, "hotspur": 35176, "papi": 35177, "kana": 35178, "kamp": 35179, "finna": 35180, "flotus": 35181, "tians": 35182, "unicorns": 35183, "tribeca": 35184, "changers": 35185, "foreground": 35186, "outa": 35187, "invaders": 35188, "gettys": 35189, "tomorrowspaperstoday": 35190, "macmillan": 35191, "handwritten": 35192, "wfp": 35193, "ude": 35194, "stateof": 35195, "based": 35196, "âĺģï¸ı": 35197, "casm": 
35198, "psyched": 35199, "historians": 35200, "fold": 35201, "dda": 35202, "aggrav": 35203, "pans": 35204, "greenway": 35205, "ausv": 35206, "ðŁĺ¶": 35207, "shraddha": 35208, "index": 35209, "besti": 35210, "zimmer": 35211, "tness": 35212, "eyeshadow": 35213, "otte": 35214, "gots": 35215, "distributing": 35216, "promin": 35217, "yol": 35218, "acea": 35219, "tramrahim": 35220, "hooper": 35221, "supreme": 35222, "jammin": 35223, "intuitive": 35224, "qualifications": 35225, "slim": 35226, "siddi": 35227, "jayne": 35228, "tripping": 35229, "gtx": 35230, "puns": 35231, "emanuel": 35232, "omg": 35233, "midsummer": 35234, "into": 35235, "succulent": 35236, "rien": 35237, "newmexico": 35238, "oor": 35239, "hooking": 35240, "inf": 35241, "ð٤Ŀ": 35242, "flirting": 35243, "nahi": 35244, "gfriend": 35245, "tps": 35246, "helix": 35247, "zs": 35248, "onie": 35249, "ctf": 35250, "kris": 35251, "irresistible": 35252, "flap": 35253, "ðŁijıðŁı»ðŁijıðŁı»": 35254, "uswnt": 35255, "rud": 35256, "ramps": 35257, "pinoy": 35258, "otw": 35259, "lolz": 35260, "lowering": 35261, "favorite": 35262, "tmc": 35263, "phrases": 35264, "hermi": 35265, "averaging": 35266, "embr": 35267, "beno": 35268, "estuary": 35269, "sleeve": 35270, "ribbons": 35271, "tash": 35272, "ู": 35273, "xf": 35274, "awgs": 35275, "sunited": 35276, "breweries": 35277, "anirud": 35278, "punches": 35279, "oldie": 35280, "ipads": 35281, "wifey": 35282, "landlords": 35283, "dji": 35284, "gunner": 35285, "Ć­Ä·Ā“": 35286, "texan": 35287, "exop": 35288, "cassandra": 35289, "soff": 35290, "ðŁļ«": 35291, "ighton": 35292, "bakers": 35293, "awarenessweek": 35294, "vall": 35295, "earp": 35296, "btsbbmas": 35297, "apologizes": 35298, "âļĵï¸ı": 35299, "wasps": 35300, "statesman": 35301, "snatch": 35302, "watchdog": 35303, "rafi": 35304, "afterparty": 35305, "spike": 35306, "jer": 35307, "periph": 35308, "rnc": 35309, "mull": 35310, "leen": 35311, "shies": 35312, "lieu": 35313, "urstrulymahesh": 35314, "merton": 35315, "desai": 35316, 
"shif": 35317, "ðŁĮ±": 35318, "pedic": 35319, "gosling": 35320, "arranging": 35321, "wwg": 35322, "geny": 35323, "youuu": 35324, "netflix": 35325, "ettes": 35326, "kwi": 35327, "bernardino": 35328, "amiga": 35329, "ب": 35330, "kashmiri": 35331, "tings": 35332, "emeritus": 35333, "decat": 35334, "abdomin": 35335, "dci": 35336, "phases": 35337, "djan": 35338, "beam": 35339, "opry": 35340, "ished": 35341, "theellenshow": 35342, "thest": 35343, "habitats": 35344, "toons": 35345, "mclaughlin": 35346, "ripper": 35347, "microbiology": 35348, "talaga": 35349, "clueless": 35350, "ssu": 35351, "croche": 35352, "bromance": 35353, "longevity": 35354, "zagreb": 35355, "prevented": 35356, "trave": 35357, "spoilt": 35358, "darryl": 35359, "migraine": 35360, "alcat": 35361, "dddd": 35362, "viv": 35363, "serpent": 35364, "mattel": 35365, "jama": 35366, "conquest": 35367, "ƮĦ": 35368, "samsung": 35369, "presbyterian": 35370, "ketch": 35371, "firefox": 35372, "motif": 35373, "lec": 35374, "chopping": 35375, "cherno": 35376, "jann": 35377, "ðŁIJ°": 35378, "prolon": 35379, "wakeup": 35380, "convergence": 35381, "merseyside": 35382, "heartbroken": 35383, "looming": 35384, "hallucin": 35385, "maize": 35386, "communism": 35387, "moh": 35388, "twitterstorians": 35389, "sergey": 35390, "reseller": 35391, "favorable": 35392, "edgy": 35393, "reiter": 35394, "malaga": 35395, "liveme": 35396, "kahn": 35397, "pulsion": 35398, "bigg": 35399, "kimkardashian": 35400, "atio": 35401, "tyranny": 35402, "ruption": 35403, "qant": 35404, "proven": 35405, "byz": 35406, "pushaw": 35407, "kristin": 35408, "eer": 35409, "tardis": 35410, "riz": 35411, "awaken": 35412, "miko": 35413, "undocumented": 35414, "pathfinder": 35415, "indirect": 35416, "resembles": 35417, "hler": 35418, "concealed": 35419, "scandal": 35420, "reim": 35421, "dnb": 35422, "critters": 35423, "attendant": 35424, "apprenticeships": 35425, "aau": 35426, "screamed": 35427, "lsu": 35428, "fah": 35429, "harbour": 35430, "edd": 35431, 
"batsman": 35432, "liss": 35433, "misha": 35434, "spaniel": 35435, "itf": 35436, "advancement": 35437, "fac": 35438, "closeup": 35439, "cecilia": 35440, "medic": 35441, "narcissi": 35442, "lavish": 35443, "giac": 35444, "mays": 35445, "leit": 35446, "winewednesday": 35447, "pushaward": 35448, "letto": 35449, "currents": 35450, "bugatti": 35451, "outine": 35452, "wj": 35453, "undo": 35454, "lerosis": 35455, "devotional": 35456, "ðŁij«": 35457, "onna": 35458, "faisal": 35459, "sauna": 35460, "himachal": 35461, "amii": 35462, "à®®": 35463, "dizzy": 35464, "screenwriting": 35465, "phx": 35466, "spn": 35467, "icki": 35468, "agirl": 35469, "fishes": 35470, "wbz": 35471, "pim": 35472, "boar": 35473, "acid": 35474, "!..": 35475, "rockefeller": 35476, "nga": 35477, "drastically": 35478, "simplify": 35479, "drumming": 35480, "autumnal": 35481, "gurmee": 35482, "lorde": 35483, "joann": 35484, "giveup": 35485, "bour": 35486, "amura": 35487, "derland": 35488, "simpler": 35489, "watson": 35490, "trident": 35491, "concordia": 35492, "bellum": 35493, "brek": 35494, "dumplings": 35495, "vion": 35496, "dungeonsanddragons": 35497, "spri": 35498, "ascension": 35499, "wildatlantic": 35500, "ust": 35501, "robins": 35502, "legion": 35503, "insist": 35504, "jaro": 35505, "guess": 35506, "sob": 35507, "bighit": 35508, "poolside": 35509, "negotiating": 35510, "mcgill": 35511, "bild": 35512, "technicians": 35513, "mitigation": 35514, "ajaydevgn": 35515, "bto": 35516, "anten": 35517, "cosmopolitan": 35518, "ðŁĺĬðŁĺĬðŁĺĬðŁĺĬ": 35519, "patrioti": 35520, "temper": 35521, "promenade": 35522, "navajo": 35523, "namm": 35524, "wrinkles": 35525, "dcfc": 35526, "leach": 35527, "brunette": 35528, "rf": 35529, "coutinho": 35530, "alti": 35531, "traditionally": 35532, "optome": 35533, "naz": 35534, "accordingly": 35535, "recard": 35536, "deets": 35537, "swell": 35538, "posure": 35539, "whitening": 35540, "stranger": 35541, "illion": 35542, "hereford": 35543, "uwu": 35544, "robber": 35545, "cotswolds": 
35546, "clen": 35547, "gorge": 35548, "namaste": 35549, "relish": 35550, "griff": 35551, "adrenaline": 35552, "blasio": 35553, "vale": 35554, "ê²": 35555, "tolerate": 35556, "railminindia": 35557, "jensen": 35558, "hoven": 35559, "ellu": 35560, "obsole": 35561, "eisenhower": 35562, "unidentified": 35563, "thanniversary": 35564, "bodyguard": 35565, "د": 35566, "idge": 35567, "schal": 35568, "stockport": 35569, "sni": 35570, "retaining": 35571, "popo": 35572, "pixie": 35573, "olithic": 35574, "kier": 35575, "hajj": 35576, "saz": 35577, "corbin": 35578, "!!!!!!!!!!": 35579, "vit": 35580, "megat": 35581, "deh": 35582, "circuit": 35583, "affleck": 35584, "theoretical": 35585, "hopeless": 35586, "uab": 35587, "slump": 35588, "bice": 35589, "jammed": 35590, "letstalk": 35591, "cani": 35592, "sideways": 35593, "labyrinth": 35594, "refs": 35595, "hahn": 35596, "jared": 35597, "ðŁį¹": 35598, "jambo": 35599, "phyl": 35600, "enhancement": 35601, "ctr": 35602, "fullest": 35603, "seye": 35604, "doba": 35605, "choic": 35606, "yos": 35607, "cbj": 35608, "andré": 35609, "rewatch": 35610, "prima": 35611, "doctrine": 35612, "forgets": 35613, "uhm": 35614, "around": 35615, "ule": 35616, "artlovers": 35617, "shiraz": 35618, "harth": 35619, "extor": 35620, "Ć…Ā”": 35621, "unexpectedly": 35622, "elius": 35623, "yx": 35624, "emmy": 35625, "seac": 35626, "ðŁijĩðŁijĩðŁijĩ": 35627, "corrected": 35628, "combu": 35629, "womanc": 35630, "cough": 35631, "whatson": 35632, "publishes": 35633, "diversity": 35634, "backbone": 35635, "lockdown": 35636, "mesmerizing": 35637, "norte": 35638, "mab": 35639, "designer": 35640, "Ć­Ä£": 35641, "ragh": 35642, "molecules": 35643, "getoutside": 35644, "thebeatles": 35645, "semiconduc": 35646, "nacho": 35647, "lunes": 35648, "hammers": 35649, "sultan": 35650, "oon": 35651, "feren": 35652, "attach": 35653, "arqu": 35654, "uttarakhand": 35655, "sash": 35656, ";-": 35657, "tread": 35658, "iko": 35659, "arthur": 35660, "scandinavian": 35661, "ration": 35662, 
"gael": 35663, "chargeable": 35664, "fishy": 35665, "vma": 35666, "handbags": 35667, "chara": 35668, "ayne": 35669, "defam": 35670, "settlers": 35671, "qadri": 35672, "palais": 35673, "inwx": 35674, "apocalyptic": 35675, "pooja": 35676, "aes": 35677, "atories": 35678, "proofing": 35679, "nlp": 35680, "tsla": 35681, "vina": 35682, "lido": 35683, "deephouse": 35684, "informatics": 35685, "vv": 35686, "ppings": 35687, "diss": 35688, "ï": 35689, "uhuru": 35690, "stony": 35691, "betrayed": 35692, "baff": 35693, "myra": 35694, "aspen": 35695, "allowance": 35696, "tamara": 35697, "cif": 35698, "corbett": 35699, "serge": 35700, "digo": 35701, "ambigu": 35702, "painters": 35703, "pcr": 35704, "pca": 35705, "noms": 35706, "loft": 35707, "vee": 35708, "opendata": 35709, "ðŁIJ±": 35710, "alexandre": 35711, "identifies": 35712, "fantasyfootball": 35713, "reproduction": 35714, "bromley": 35715, "wareagle": 35716, "mmer": 35717, "pss": 35718, "cues": 35719, "ayat": 35720, "hutchinson": 35721, "sarac": 35722, "jackman": 35723, "irah": 35724, "apink": 35725, "cols": 35726, "aussies": 35727, "execs": 35728, "dayton": 35729, "ðŁĻĨ": 35730, "imv": 35731, "haram": 35732, "chuckle": 35733, "authenticity": 35734, "ardo": 35735, "incubator": 35736, "Ć ĀøĀŖ": 35737, "photoshopped": 35738, "embraced": 35739, "fightfor": 35740, "gorman": 35741, "zzzz": 35742, "scholastic": 35743, "crisps": 35744, "teapo": 35745, "midnight": 35746, "gaine": 35747, "collier": 35748, "sate": 35749, "dette": 35750, "ÄŃ": 35751, "imagine": 35752, "iff": 35753, "twili": 35754, "ification": 35755, "teatro": 35756, "norma": 35757, "esur": 35758, "emergencies": 35759, "riseup": 35760, "ringer": 35761, "hassle": 35762, "caitlyn": 35763, "tranquil": 35764, "versa": 35765, "seb": 35766, "overlook": 35767, "gini": 35768, "bogo": 35769, "sere": 35770, "mayne": 35771, "henrik": 35772, "contaminated": 35773, "rhapsody": 35774, "proportion": 35775, "wildatlanticway": 35776, "âģ©.": 35777, "organisers": 35778, "trane": 35779, 
"standard": 35780, "sperm": 35781, "launcher": 35782, "ricci": 35783, "herts": 35784, "paperwork": 35785, "showcased": 35786, "meryl": 35787, "pena": 35788, "pimp": 35789, "disastrous": 35790, "^.^": 35791, "phara": 35792, "xis": 35793, "frontal": 35794, "swirl": 35795, "spills": 35796, "swagger": 35797, "smartwatch": 35798, "sizzling": 35799, "saviour": 35800, "catar": 35801, "bbcr": 35802, "refurbishment": 35803, "dris": 35804, "citroen": 35805, "absorb": 35806, "patriotism": 35807, "illeg": 35808, "chromo": 35809, "freshers": 35810, "rus": 35811, "limiting": 35812, "efish": 35813, "downed": 35814, "mandir": 35815, "hazelnut": 35816, "pall": 35817, "macon": 35818, "disappearing": 35819, "qualifies": 35820, "boon": 35821, "barracks": 35822, "amine": 35823, "gendere": 35824, "ðŁļĺ": 35825, "jes": 35826, "ãĄŃ": 35827, "quito": 35828, "middleweight": 35829, "schau": 35830, "quadru": 35831, "aciones": 35832, "limitless": 35833, "ðŁijĮðŁı½": 35834, "chman": 35835, "arav": 35836, "regulators": 35837, "itup": 35838, "battersea": 35839, "milford": 35840, "gz": 35841, "ticking": 35842, "ghou": 35843, "crushes": 35844, "tutu": 35845, "dreadful": 35846, "famine": 35847, "forchange": 35848, "dalailama": 35849, "ðŁēį": 35850, "whitaker": 35851, "hashmi": 35852, "hus": 35853, "vod": 35854, "bette": 35855, "aaah": 35856, "isoo": 35857, "ðŁ„Ī": 35858, "haar": 35859, "laine": 35860, "bv": 35861, "allday": 35862, "sprout": 35863, "indiegames": 35864, "freebie": 35865, "greeks": 35866, "butler": 35867, "illin": 35868, "haal": 35869, "wareness": 35870, "sima": 35871, "publichealth": 35872, "gama": 35873, "waa": 35874, "oung": 35875, "goooo": 35876, "okinawa": 35877, "offenders": 35878, "impose": 35879, "hoc": 35880, "youngster": 35881, "storyteller": 35882, "scap": 35883, "fighter": 35884, "+,": 35885, "whites": 35886, "musicmonday": 35887, "reza": 35888, "goducks": 35889, "bria": 35890, "mium": 35891, "casper": 35892, "crumbs": 35893, "aad": 35894, "martialarts": 35895, "chp": 
35896, "rigged": 35897, "tng": 35898, "harvested": 35899, "sak": 35900, "dojo": 35901, "millwall": 35902, "bnw": 35903, "ocd": 35904, "historyof": 35905, "tmr": 35906, "sirens": 35907, "fanci": 35908, "caregivers": 35909, "vira": 35910, "soni": 35911, "recurring": 35912, "acknowledged": 35913, "ðŁıŁ": 35914, "ophile": 35915, "bucky": 35916, "stressing": 35917, "rook": 35918, "digger": 35919, "vival": 35920, "sando": 35921, "fleet": 35922, "siers": 35923, "selcaday": 35924, "refreshed": 35925, "antifa": 35926, "aque": 35927, "polo": 35928, "disappearance": 35929, "demb": 35930, "âĮļï¸ı": 35931, "rented": 35932, "berger": 35933, "gmb": 35934, "cula": 35935, "ssal": 35936, "goody": 35937, "uhh": 35938, "marcelo": 35939, "wanna": 35940, "software": 35941, "shopsmall": 35942, "turtle": 35943, "tomas": 35944, "frisco": 35945, "ðŁĺįðŁēķ": 35946, "jimenez": 35947, "csu": 35948, "dayz": 35949, "ando": 35950, "wynne": 35951, "choreographer": 35952, "cervical": 35953, "trailblazers": 35954, "edg": 35955, "zendaya": 35956, "travelblog": 35957, "els": 35958, "wholesome": 35959, "cog": 35960, "labout": 35961, "arney": 35962, "delle": 35963, "suisse": 35964, "masi": 35965, "inese": 35966, "ombe": 35967, "fiddle": 35968, "reclaim": 35969, "pau": 35970, "watcher": 35971, "slain": 35972, "berty": 35973, "optimum": 35974, "elites": 35975, "minis": 35976, "turkey": 35977, "patrols": 35978, "gerard": 35979, "aureli": 35980, "wildly": 35981, "waltz": 35982, "brgy": 35983, "wob": 35984, "crest": 35985, "+++": 35986, "vez": 35987, "frosted": 35988, "davido": 35989, "thex": 35990, "paramedics": 35991, "pinto": 35992, "hank": 35993, "dupont": 35994, "urg": 35995, "fostering": 35996, "micropoetry": 35997, "spectre": 35998, "---->": 35999, "neuro": 36000, "frida": 36001, "musical": 36002, "galveston": 36003, "effic": 36004, "scape": 36005, "palazzo": 36006, "thall": 36007, "provisional": 36008, "pjs": 36009, "aure": 36010, "ðŁĶľ": 36011, "mamamoo": 36012, "kitties": 36013, "cree": 36014, 
"wak": 36015, "loool": 36016, "lupus": 36017, "cnblue": 36018, "ú": 36019, "ðŁİ¬": 36020, "raced": 36021, "trose": 36022, "omas": 36023, "stride": 36024, "coors": 36025, "⤵ï¸ı": 36026, "incomparable": 36027, "cyril": 36028, "broader": 36029, "areclipse": 36030, "ðŁįĶ": 36031, "interval": 36032, "tiru": 36033, "coworking": 36034, "waco": 36035, "aham": 36036, "abee": 36037, "flourish": 36038, "thetimes": 36039, "olini": 36040, "kickboxing": 36041, "lucer": 36042, "atla": 36043, "asun": 36044, "casserole": 36045, "miaw": 36046, "lobbying": 36047, "janice": 36048, "cirque": 36049, "reflex": 36050, "leary": 36051, "sanatomy": 36052, "tempest": 36053, "semb": 36054, "murdering": 36055, "usav": 36056, "robo": 36057, "onet": 36058, "pcc": 36059, "natives": 36060, "lifeof": 36061, "saha": 36062, "ruthless": 36063, "relates": 36064, "appetizer": 36065, "pyeongchang": 36066, "nord": 36067, "eru": 36068, "athing": 36069, "ugly": 36070, "plying": 36071, "brance": 36072, "organise": 36073, "kendra": 36074, "dato": 36075, "cheeses": 36076, "parma": 36077, "burnout": 36078, "astra": 36079, "pretoria": 36080, "adjustment": 36081, "uku": 36082, "slo": 36083, "liken": 36084, "favors": 36085, "clive": 36086, "beets": 36087, "snowdonia": 36088, "gotv": 36089, "syn": 36090, "openhouse": 36091, "pani": 36092, "portrayed": 36093, "slated": 36094, "mecca": 36095, "renal": 36096, "supportsmallstreamers": 36097, "staffs": 36098, "dao": 36099, "biker": 36100, "viktor": 36101, "titus": 36102, "admired": 36103, "ðŁĵ±": 36104, "hurrican": 36105, "heats": 36106, "glory": 36107, "photogenic": 36108, "meri": 36109, "depor": 36110, "burnham": 36111, "orangu": 36112, "djing": 36113, "impressionism": 36114, "ignition": 36115, "cai": 36116, "wynn": 36117, "depe": 36118, "coveted": 36119, "collagen": 36120, "saus": 36121, "ornam": 36122, "administrators": 36123, "sson": 36124, "nhpolitics": 36125, "hahahahahahahaha": 36126, "aspirations": 36127, "rgb": 36128, "swollen": 36129, "sowe": 36130, "scr": 
36131, "divergent": 36132, "houghton": 36133, "hanoi": 36134, "dory": 36135, "niki": 36136, "landry": 36137, "bcci": 36138, "ðŁijĮðŁijĮ": 36139, "ismail": 36140, "tripod": 36141, "herd": 36142, "bhatt": 36143, "dressage": 36144, "tabby": 36145, "inguish": 36146, "huron": 36147, "à³į": 36148, "ĆƒÅ‚": 36149, "todas": 36150, "evangelical": 36151, "chords": 36152, "stjohn": 36153, "sloppy": 36154, "martyr": 36155, "facebook": 36156, "alight": 36157, "sensei": 36158, "kathniel": 36159, "rites": 36160, "zione": 36161, "uo": 36162, "revelations": 36163, "weightlifting": 36164, "pano": 36165, "ncwx": 36166, "acton": 36167, "à®ķ": 36168, "ز": 36169, "soma": 36170, "à¸Ĺ": 36171, "respecting": 36172, "marche": 36173, "foreman": 36174, "betty": 36175, "kik": 36176, "shibu": 36177, "poon": 36178, "argyle": 36179, "kswx": 36180, "etz": 36181, "marbella": 36182, "brackets": 36183, "standby": 36184, "fireside": 36185, "defiance": 36186, "vex": 36187, "britannia": 36188, "inhabit": 36189, "appoint": 36190, "piyush": 36191, "leash": 36192, "sciento": 36193, "flask": 36194, "senna": 36195, ">:": 36196, "atroc": 36197, "sanderson": 36198, "idlib": 36199, "dhanush": 36200, "ðŁĺĻ": 36201, "enthr": 36202, "hitch": 36203, "dedly": 36204, "alley": 36205, "dork": 36206, "mondo": 36207, "cuddly": 36208, "missin": 36209, "yesss": 36210, "nighting": 36211, "jpn": 36212, "wary": 36213, "umpire": 36214, "maz": 36215, "ê³": 36216, "babs": 36217, "ĭãģ": 36218, "stanford": 36219, "possessed": 36220, "exceeded": 36221, "ðŁĶ¶": 36222, "wallart": 36223, "trap": 36224, "jil": 36225, "hibis": 36226, "spying": 36227, "scribe": 36228, "khalil": 36229, "translator": 36230, "lumb": 36231, "dized": 36232, "chc": 36233, "supervision": 36234, "shutter": 36235, "jag": 36236, "_*": 36237, "yesterdays": 36238, "msf": 36239, "hihi": 36240, "gonzaga": 36241, "gillespie": 36242, "vivek": 36243, "ecstatic": 36244, "thismorning": 36245, "chus": 36246, "edes": 36247, "stoned": 36248, "bees": 36249, "ðŁĩ¹ðŁĩ": 36250, 
"turin": 36251, "hover": 36252, "atrics": 36253, "stern": 36254, "samheughan": 36255, "autism": 36256, "miya": 36257, "eyewitness": 36258, "writings": 36259, "traveltips": 36260, "chutney": 36261, "pxrtg": 36262, "kenyans": 36263, "mystic": 36264, "krit": 36265, "/$": 36266, "redhead": 36267, "worldly": 36268, "amus": 36269, "opla": 36270, "leve": 36271, "gabbana": 36272, "seen": 36273, "oclock": 36274, "ganga": 36275, "keenan": 36276, "scent": 36277, "oldies": 36278, "gogreen": 36279, "cornerstone": 36280, "comply": 36281, "concours": 36282, "ðŁİ¶ðŁİ¶": 36283, "haan": 36284, "confis": 36285, "awson": 36286, "cleop": 36287, "îĢ": 36288, "suzu": 36289, "sauté": 36290, "algar": 36291, "subscriber": 36292, "esteemed": 36293, "ãĤ¤ãĄ": 36294, "worthwhile": 36295, "melrose": 36296, "flock": 36297, "brightly": 36298, "violinist": 36299, "pere": 36300, "slipping": 36301, "andco": 36302, "sigh": 36303, "havan": 36304, "culo": 36305, "msa": 36306, "fibrosis": 36307, "matilda": 36308, "rafting": 36309, "award": 36310, "ëª": 36311, "mmmm": 36312, "geaux": 36313, "steiner": 36314, "sinn": 36315, "helpers": 36316, "beetles": 36317, "aimee": 36318, "taiwan": 36319, "pistachio": 36320, "macbeth": 36321, "mzan": 36322, "descendants": 36323, "onsale": 36324, "inr": 36325, "ilm": 36326, "grouse": 36327, "saig": 36328, "mow": 36329, "bigre": 36330, "adjustments": 36331, "tula": 36332, "mathew": 36333, "translates": 36334, "muh": 36335, "bollah": 36336, "ðŁēĽðŁēĻ": 36337, "amores": 36338, "abouts": 36339, "bombshell": 36340, "blaster": 36341, "xavi": 36342, "sns": 36343, "kroger": 36344, "gather": 36345, "eradic": 36346, "daft": 36347, "chemo": 36348, "benches": 36349, "ðŁĩ©ðŁĩ": 36350, "utv": 36351, "oura": 36352, "nko": 36353, "gatorade": 36354, "biafra": 36355, "okstate": 36356, "imdanielpadilla": 36357, "domains": 36358, "openingday": 36359, "kiddo": 36360, "doi": 36361, "rice": 36362, "daycare": 36363, "macmillan": 36364, "bathurst": 36365, "cheerleading": 36366, "ð٦ģ": 36367, 
"cashback": 36368, "kwon": 36369, "hobbies": 36370, "exempl": 36371, "riesling": 36372, "âļª": 36373, "agles": 36374, "nys": 36375, "everything": 36376, "navis": 36377, "addi": 36378, "magnesium": 36379, "facelift": 36380, "arkham": 36381, "grandes": 36382, "extremist": 36383, "donat": 36384, "vitality": 36385, "pumpkin": 36386, "betta": 36387, "sltd": 36388, "artisan": 36389, "liby": 36390, "peaked": 36391, "ahhhhh": 36392, "maryam": 36393, "assim": 36394, "unsc": 36395, "mente": 36396, "alaya": 36397, "lowers": 36398, "aras": 36399, "griev": 36400, "leip": 36401, "grati": 36402, "crises": 36403, "sprints": 36404, "execute": 36405, "wto": 36406, "msd": 36407, "magical": 36408, "reviewer": 36409, "sparkles": 36410, "jukebox": 36411, "ðŁĺĤâĿ¤ï¸ı": 36412, "payback": 36413, "licenses": 36414, "dunkin": 36415, "belt": 36416, "lakewood": 36417, "hateful": 36418, "budgets": 36419, "revamped": 36420, "pherson": 36421, "kyiv": 36422, "wentworth": 36423, "rosen": 36424, "cruise": 36425, "giggle": 36426, "defstar": 36427, "assassinscre": 36428, "ymouth": 36429, "winkle": 36430, "wfc": 36431, "bandwagon": 36432, "bkk": 36433, "wiring": 36434, "kearney": 36435, "southside": 36436, "petit": 36437, "!ðŁĺį": 36438, "nordic": 36439, "mirza": 36440, "mugabe": 36441, "vl": 36442, "scones": 36443, "ktv": 36444, "sandal": 36445, "duc": 36446, "malls": 36447, "ðŁēŀðŁēŀ": 36448, "itc": 36449, "alay": 36450, "impair": 36451, "unrest": 36452, "floss": 36453, "cé": 36454, "abou": 36455, "varying": 36456, "museo": 36457, "server": 36458, "diya": 36459, "hibiscus": 36460, "eroy": 36461, "merritt": 36462, "findom": 36463, "fpp": 36464, "unusually": 36465, "gott": 36466, "contingent": 36467, "aliaa": 36468, "ballon": 36469, "jol": 36470, "hiked": 36471, "zyme": 36472, "ayr": 36473, "agn": 36474, "gaz": 36475, "periodic": 36476, "sparty": 36477, "practising": 36478, "linton": 36479, "talis": 36480, "cypri": 36481, "womaninbiz": 36482, "radiodisney": 36483, "ðŁĮ¼": 36484, "jumpers": 36485, 
"endocr": 36486, "ðŁļ¨ðŁļ¨": 36487, "andon": 36488, "sharapo": 36489, "mier": 36490, "masonic": 36491, "factories": 36492, "vien": 36493, "bbers": 36494, "ìĽIJ": 36495, "hold": 36496, "kebab": 36497, "beak": 36498, "approached": 36499, "acmilan": 36500, "munro": 36501, "kosher": 36502, "excellency": 36503, "negotiation": 36504, "waltdisneyworld": 36505, "crouch": 36506, "teasing": 36507, "suppression": 36508, "enya": 36509, "bce": 36510, "transformationtuesday": 36511, "callie": 36512, "viswas": 36513, "pgat": 36514, "icted": 36515, "endings": 36516, "escu": 36517, "recruited": 36518, "itfc": 36519, "collaborations": 36520, "gino": 36521, "snuck": 36522, "auschwitz": 36523, "ifc": 36524, "xii": 36525, "kesha": 36526, "gervais": 36527, "cloak": 36528, "xl": 36529, "saad": 36530, "probation": 36531, "precau": 36532, "macin": 36533, "anastasi": 36534, "lek": 36535, "eazy": 36536, "daysofcode": 36537, "mariahcarey": 36538, "yog": 36539, "stitched": 36540, "boyfriends": 36541, "shar": 36542, "phile": 36543, "agu": 36544, "twinkle": 36545, "phishing": 36546, "weekender": 36547, "icton": 36548, "gurmeetramrahim": 36549, "alton": 36550, "leness": 36551, "allan": 36552, "penultimate": 36553, "krystal": 36554, "gou": 36555, "lande": 36556, "dismant": 36557, "abusing": 36558, "norse": 36559, "paterson": 36560, "edmun": 36561, "apan": 36562, "xiumin": 36563, "skel": 36564, "catwalk": 36565, "react": 36566, "walled": 36567, "tangle": 36568, "bryn": 36569, "veto": 36570, "supermoon": 36571, "casablanc": 36572, "appreciates": 36573, "skid": 36574, "both": 36575, "catalina": 36576, "eleague": 36577, "cybermonday": 36578, "cautious": 36579, "ð٤ĵ": 36580, "novo": 36581, "hampton": 36582, "haye": 36583, "josef": 36584, "varan": 36585, "lobos": 36586, "roanoke": 36587, "orphans": 36588, "ttin": 36589, "squads": 36590, "ishqbaaaz": 36591, "blackpanther": 36592, "etu": 36593, "ksh": 36594, "crumble": 36595, "cessna": 36596, "relieved": 36597, "scully": 36598, "pollinators": 36599, 
"explorecanada": 36600, "kies": 36601, "kamloops": 36602, "kiran": 36603, "primal": 36604, "settlements": 36605, "hotspot": 36606, "brainstorming": 36607, "cedric": 36608, "biennial": 36609, "shant": 36610, "âĻ”âĻ”âĻ”": 36611, "doon": 36612, "hearn": 36613, "walkway": 36614, "fem": 36615, "veal": 36616, "deportation": 36617, "toxins": 36618, "eliminating": 36619, "descending": 36620, "bythe": 36621, "blasphe": 36622, "hasta": 36623, "complement": 36624, "ascent": 36625, "riga": 36626, "provost": 36627, "âĸª": 36628, "weeping": 36629, "antisemitism": 36630, "employee": 36631, "unearthed": 36632, "pino": 36633, "natalie": 36634, "blad": 36635, "angola": 36636, "lockheed": 36637, "inian": 36638, "agr": 36639, "nister": 36640, "impala": 36641, "mke": 36642, "fanatic": 36643, "âĺħâĺħ": 36644, "ðŁij¸": 36645, "luch": 36646, "simplified": 36647, "gallery": 36648, "economic": 36649, "cyborg": 36650, "coni": 36651, "selma": 36652, "inception": 36653, "koala": 36654, "dvds": 36655, "crested": 36656, "mmor": 36657, "visible": 36658, "nsd": 36659, "ðŁĻĮðŁı½": 36660, "wunder": 36661, "refrigerator": 36662, "reopening": 36663, "eera": 36664, "carousel": 36665, "asp": 36666, "ballistic": 36667, "victory": 36668, "motive": 36669, "trey": 36670, "sharapova": 36671, "sii": 36672, "monter": 36673, "intend": 36674, "westchester": 36675, "spe": 36676, "cymb": 36677, "vidal": 36678, "llama": 36679, "univ": 36680, "finer": 36681, "craftsmanship": 36682, "jazzfest": 36683, "bch": 36684, "aggio": 36685, "ncc": 36686, "lambda": 36687, "tranquility": 36688, "cisco": 36689, "baden": 36690, "sobbing": 36691, "ofi": 36692, "gota": 36693, "rumored": 36694, "warmed": 36695, "orean": 36696, "acton": 36697, "marci": 36698, "ghani": 36699, "âľĵ": 36700, "assorted": 36701, "pembroke": 36702, "penelope": 36703, "daf": 36704, "atty": 36705, "aimo": 36706, "pretzel": 36707, "carnival": 36708, "thanos": 36709, "kochi": 36710, "mersal": 36711, "hamradio": 36712, "artwit": 36713, "casc": 36714, 
"guerrilla": 36715, "kushner": 36716, "kapp": 36717, "alise": 36718, "toddlers": 36719, "stewardship": 36720, "otti": 36721, "terri": 36722, "tempe": 36723, "restless": 36724, "vito": 36725, "zayed": 36726, "rspb": 36727, "pion": 36728, "hippo": 36729, "hawthorne": 36730, "inas": 36731, "amily": 36732, "nutcracker": 36733, "lop": 36734, "dali": 36735, "tropic": 36736, "ðŁ¤ł": 36737, "ulo": 36738, "jaredle": 36739, "pyrene": 36740, "paleo": 36741, "usair": 36742, "mould": 36743, "itated": 36744, "genetically": 36745, "biomass": 36746, "ðŁĩ³ðŁĩ±": 36747, "dodd": 36748, "practiced": 36749, "monarchs": 36750, "unmanned": 36751, "mbuhari": 36752, "amal": 36753, "photogra": 36754, "kool": 36755, "brendon": 36756, "juices": 36757, "cure": 36758, "worldbank": 36759, "pointers": 36760, "ðŁēĿ": 36761, "turf": 36762, "leds": 36763, "borussia": 36764, "baptism": 36765, "warwickshire": 36766, "mounts": 36767, "gayo": 36768, "begg": 36769, "copied": 36770, "asians": 36771, "kg": 36772, "modernist": 36773, "gid": 36774, "frontman": 36775, "concentrated": 36776, "yt": 36777, "scavenger": 36778, "ironically": 36779, "adic": 36780, "psn": 36781, "ðŁ„ī": 36782, "culturally": 36783, "yuv": 36784, "macarthur": 36785, "fertilizer": 36786, "bewithyou": 36787, "rigor": 36788, "minors": 36789, "zoning": 36790, "âĸł": 36791, "rir": 36792, "adolescent": 36793, "vinny": 36794, "reng": 36795, "sandstone": 36796, "guet": 36797, "westh": 36798, "pledged": 36799, "laced": 36800, "spide": 36801, "vai": 36802, "tycoon": 36803, "seizure": 36804, "dup": 36805, "appalachian": 36806, "rok": 36807, "catholics": 36808, "seychel": 36809, "possess": 36810, "lager": 36811, "jodi": 36812, "champ": 36813, "stras": 36814, "dina": 36815, "centuri": 36816, "calder": 36817, "bluray": 36818, "ðŁĩ¨ðŁĩ³": 36819, "modo": 36820, "annette": 36821, "youtubers": 36822, "chaps": 36823, "angling": 36824, "labeling": 36825, "aqui": 36826, "pkwy": 36827, "lyle": 36828, "bisexual": 36829, "litur": 36830, "dugout": 36831, 
"libby": 36832, "greysanatomy": 36833, "substances": 36834, "augustus": 36835, "rallying": 36836, "fidel": 36837, "ingue": 36838, "人": 36839, "hallmarkchannel": 36840, "toothbrush": 36841, "mÔ": 36842, "adirond": 36843, "aggi": 36844, "ðŁĵį:": 36845, "crusade": 36846, "taxation": 36847, "kz": 36848, "iver": 36849, "doubling": 36850, "roomie": 36851, "wab": 36852, "enrolled": 36853, "azon": 36854, "aju": 36855, "grandchildren": 36856, "asdf": 36857, "ðŁ„º": 36858, "matic": 36859, "oughton": 36860, "utilize": 36861, "ðŁē£": 36862, "ponder": 36863, "raisin": 36864, "dysfunction": 36865, "cobain": 36866, "butternut": 36867, "eman": 36868, "sured": 36869, "drian": 36870, "andfriends": 36871, "withthe": 36872, "onomy": 36873, "heineken": 36874, "bridal": 36875, "leadership": 36876, "pyramids": 36877, "deutschland": 36878, "jocel": 36879, "bowel": 36880, "yqr": 36881, "horsepower": 36882, "beacon": 36883, "ingeni": 36884, "gradient": 36885, "fermented": 36886, "moom": 36887, "thingy": 36888, "potassi": 36889, "wristband": 36890, "bord": 36891, "bodied": 36892, "ðŁĺŃðŁĺį": 36893, "mapp": 36894, "kau": 36895, "cyberpunk": 36896, "phish": 36897, "looking": 36898, "coates": 36899, "apur": 36900, "amie": 36901, "uklabour": 36902, "atin": 36903, "gla": 36904, "adoptable": 36905, "shelby": 36906, "villi": 36907, "riya": 36908, "mingly": 36909, "climber": 36910, "bumblebee": 36911, "ðŁĺ¸": 36912, "csd": 36913, "âĿ„": 36914, "hospitalized": 36915, "cki": 36916, "hater": 36917, "chr": 36918, "retina": 36919, "ita": 36920, "fanbase": 36921, "beatrice": 36922, "gwyne": 36923, "goss": 36924, "fos": 36925, "favorited": 36926, "swachhbharat": 36927, "malade": 36928, "monmouth": 36929, "\"[": 36930, "sivan": 36931, "shhh": 36932, "commanding": 36933, "sainsburys": 36934, "weed": 36935, "gman": 36936, "ssw": 36937, "reptile": 36938, "ivy": 36939, "tropics": 36940, "rollers": 36941, "overcast": 36942, "exposition": 36943, "masquerade": 36944, "mancrush": 36945, "waist": 36946, "sprinter": 
36947, "sleet": 36948, "levin": 36949, "jpg": 36950, "_(": 36951, "opel": 36952, "exploit": 36953, "apa": 36954, "powe": 36955, "wrecking": 36956, "jongin": 36957, "orb": 36958, "erick": 36959, "bosco": 36960, "praising": 36961, "bertr": 36962, "towing": 36963, "insecurity": 36964, "kut": 36965, "restocked": 36966, "rrp": 36967, "prescribed": 36968, "trafalgar": 36969, "pert": 36970, "gases": 36971, "apprais": 36972, "ghar": 36973, "musicals": 36974, "âĸ¬âĸ¬": 36975, "mcfad": 36976, "agony": 36977, "condition": 36978, "equip": 36979, "shik": 36980, "atravel": 36981, "ðŁĩ¿ðŁĩ¦": 36982, "keh": 36983, "abduction": 36984, "peoria": 36985, "wilkins": 36986, "gms": 36987, "asd": 36988, "evi": 36989, "ðŁēĹðŁēĹðŁēĹ": 36990, "uz": 36991, "moc": 36992, "hallelujah": 36993, "guadalu": 36994, "louvre": 36995, "drawing": 36996, "gove": 36997, "phant": 36998, "frie": 36999, "webdev": 37000, "programmer": 37001, "zable": 37002, "gamescom": 37003, "clarify": 37004, "lith": 37005, "kinky": 37006, "âĿ£": 37007, "labourdoorstep": 37008, "sonata": 37009, "juris": 37010, "maiden": 37011, "viadu": 37012, "bucharest": 37013, "conditioned": 37014, "capitalist": 37015, "ude": 37016, "psb": 37017, "spca": 37018, "lulla": 37019, "foothills": 37020, "kayo": 37021, "bond": 37022, "womb": 37023, "rounder": 37024, "cesar": 37025, "bursts": 37026, "apra": 37027, "swoon": 37028, "sabrin": 37029, "fragrant": 37030, "clearer": 37031, "kubrick": 37032, "climax": 37033, "journo": 37034, "agle": 37035, "ðŁı½âĢįâĻĢï¸ı": 37036, "pooch": 37037, "hale": 37038, "solit": 37039, "salmon": 37040, "organisms": 37041, "bronson": 37042, "arten": 37043, "hodgson": 37044, "alove": 37045, "venture": 37046, "bbi": 37047, "aea": 37048, "ðŁIJ¢": 37049, "ldn": 37050, "dnr": 37051, "ozone": 37052, "ellas": 37053, "manny": 37054, "azzur": 37055, "unbeat": 37056, "truffles": 37057, "thong": 37058, "mañ": 37059, "lasers": 37060, "leye": 37061, "gettysburg": 37062, "backpacks": 37063, "oris": 37064, "maison": 37065, 
"crawling": 37066, "labra": 37067, "cling": 37068, "dragging": 37069, "steal": 37070, "doubt": 37071, "devan": 37072, "ckers": 37073, "agentsof": 37074, "photobomb": 37075, "elonmusk": 37076, "aboy": 37077, "distances": 37078, "storyline": 37079, "spi": 37080, "northan": 37081, "europeans": 37082, "whale": 37083, "serpent": 37084, "ðŁļ²": 37085, "fior": 37086, "trit": 37087, "oxo": 37088, "awarding": 37089, "classmate": 37090, "sufc": 37091, "smartest": 37092, "riches": 37093, "prk": 37094, "bigfoot": 37095, "armb": 37096, "bipolar": 37097, "dwelling": 37098, "omars": 37099, "kwan": 37100, "grime": 37101, "meng": 37102, "frederick": 37103, "navarro": 37104, "sorrynotsorry": 37105, "jaredleto": 37106, "pave": 37107, "slack": 37108, "barnsley": 37109, "attar": 37110, "eviction": 37111, "accumulation": 37112, "oir": 37113, "catchy": 37114, "welter": 37115, "vikas": 37116, "hassee": 37117, "nikita": 37118, "moyes": 37119, "mathews": 37120, "shiv": 37121, "gatwick": 37122, "profiling": 37123, "companions": 37124, "marrake": 37125, "antics": 37126, "ðŁĻĮðŁĻĮðŁĻĮ": 37127, "sese": 37128, "boi": 37129, "bartlett": 37130, "poisonous": 37131, "abuses": 37132, "ymm": 37133, "kampala": 37134, "guggenheim": 37135, "imvkohli": 37136, "dolom": 37137, "bree": 37138, "throttle": 37139, "gareth": 37140, "fitzpatrick": 37141, "unya": 37142, "parad": 37143, "margot": 37144, "jnr": 37145, "wea": 37146, "potassium": 37147, "pnc": 37148, "disguised": 37149, "crash": 37150, "renergy": 37151, "illic": 37152, "coupled": 37153, "niels": 37154, "ciones": 37155, "æĹ„": 37156, "iment": 37157, "despicable": 37158, "dye": 37159, "whatcha": 37160, "connections": 37161, "paralympics": 37162, "gauntlet": 37163, "waitrose": 37164, "suicidal": 37165, "starship": 37166, "vapor": 37167, "stou": 37168, "lawmaker": 37169, "cooled": 37170, "simo": 37171, "theno": 37172, "offroad": 37173, "jaden": 37174, "basque": 37175, "vicky": 37176, "lukaku": 37177, "centro": 37178, "trish": 37179, "strategist": 37180, 
"medications": 37181, "horst": 37182, "bfc": 37183, "grail": 37184, "sharply": 37185, "aditya": 37186, "tomb": 37187, "kaufman": 37188, "tripad": 37189, "samba": 37190, "pastoral": 37191, "britney": 37192, "sagan": 37193, "hillside": 37194, "masons": 37195, "sara": 37196, "zone": 37197, "xu": 37198, "totes": 37199, "robbie": 37200, "appen": 37201, "montag": 37202, "dero": 37203, "shortfilm": 37204, "charismatic": 37205, "tators": 37206, "kiba": 37207, "andri": 37208, "alarming": 37209, "splitting": 37210, "icar": 37211, "thug": 37212, "scariest": 37213, "sylvester": 37214, "anan": 37215, "utrecht": 37216, "adifference": 37217, "meade": 37218, "buster": 37219, "airstrikes": 37220, "cuffs": 37221, "accountants": 37222, "ðŁĺ”ðŁĺ”": 37223, "newt": 37224, "bott": 37225, "issuing": 37226, "clancy": 37227, "wwenetwork": 37228, "kyuhyun": 37229, "resemble": 37230, "pajamas": 37231, "sink": 37232, "kinney": 37233, "sulph": 37234, "ork": 37235, "lies": 37236, "lagh": 37237, "orton": 37238, "rahul": 37239, "dsc": 37240, "wewill": 37241, "ream": 37242, "colloqui": 37243, "sharia": 37244, "hectic": 37245, "sarcasm": 37246, "lander": 37247, "tmz": 37248, "endorf": 37249, "roz": 37250, "hammered": 37251, "fris": 37252, "wadi": 37253, "popefrancis": 37254, "heit": 37255, "flashlight": 37256, "unborn": 37257, "opes": 37258, "holiness": 37259, "ðŁIJ¦": 37260, "nacht": 37261, "imsa": 37262, "gracing": 37263, "bjp": 37264, "verts": 37265, "csc": 37266, "homeowner": 37267, "aque": 37268, "bigotry": 37269, "annie": 37270, "bagh": 37271, "âĿ¤ï¸ıðŁĺį": 37272, "cari": 37273, "thomp": 37274, "disposable": 37275, "cardiology": 37276, "patented": 37277, "hhhhhh": 37278, "ldr": 37279, "stephenson": 37280, "crores": 37281, "fanning": 37282, "climat": 37283, "ðŁijįðŁijįðŁijį": 37284, "ðŁijįðŁı¼": 37285, "aeron": 37286, "piccadilly": 37287, "bankrupt": 37288, "silvia": 37289, "employ": 37290, "donny": 37291, "commenting": 37292, "screenwriter": 37293, "iota": 37294, "cean": 37295, "ancers": 
37296, "tuan": 37297, "streetwear": 37298, "य": 37299, "skine": 37300, "espa": 37301, "asif": 37302, "osce": 37303, "sheppard": 37304, "morecam": 37305, "bottle": 37306, "ders": 37307, "oracle": 37308, "googleplay": 37309, "averaged": 37310, "edmonton": 37311, "stephan": 37312, "sisterhood": 37313, "crusted": 37314, "staggering": 37315, "methodology": 37316, "congresswoman": 37317, "cabo": 37318, "triggers": 37319, "milky": 37320, "glide": 37321, "toothpaste": 37322, "roommates": 37323, "nuff": 37324, "guam": 37325, "sprinkles": 37326, "alternative": 37327, "watfordfc": 37328, "uoft": 37329, "haley": 37330, "contacted": 37331, "bundy": 37332, "prostitu": 37333, "ghar": 37334, "preston": 37335, "onsite": 37336, "hilar": 37337, "gts": 37338, "catt": 37339, "hampstead": 37340, "??!": 37341, "ðŁĩ§ðŁĩ": 37342, "bbcqt": 37343, "alessandro": 37344, "resist": 37345, "maidan": 37346, "tko": 37347, "shading": 37348, "pinup": 37349, "gallo": 37350, "sinu": 37351, "atec": 37352, "funk": 37353, "aclu": 37354, "strides": 37355, "rhyme": 37356, "wetland": 37357, "bbcspringwatch": 37358, "tins": 37359, "wildcard": 37360, "stour": 37361, "flamenco": 37362, "paula": 37363, "ontology": 37364, "gangsta": 37365, "amade": 37366, "ãĤ«": 37367, "tbs": 37368, "skeletal": 37369, "runner": 37370, "jardin": 37371, "harrier": 37372, "hunted": 37373, "zhen": 37374, "believeinfilm": 37375, "demean": 37376, "auditi": 37377, "restart": 37378, "chondri": 37379, "âĿ¤ï¸ıðŁēĻ": 37380, "mclaren": 37381, "gab": 37382, "shum": 37383, "ausa": 37384, "lewisham": 37385, "ypg": 37386, "kjv": 37387, "furnished": 37388, "doro": 37389, "bonded": 37390, "morty": 37391, "latitude": 37392, "_)": 37393, "lova": 37394, "waterways": 37395, "vinai": 37396, "shorth": 37397, "drunk": 37398, "cay": 37399, "ayana": 37400, "kaplan": 37401, "cappuccino": 37402, "spro": 37403, "lifeboat": 37404, "hasbro": 37405, "spolice": 37406, "toron": 37407, "doing": 37408, "damn": 37409, "shree": 37410, "fountains": 37411, "entation": 
37412, "maru": 37413, "boarder": 37414, "topless": 37415, "jada": 37416, "channing": 37417, "ulls": 37418, "enclosure": 37419, "gibson": 37420, "fractured": 37421, "britton": 37422, "ö": 37423, "tous": 37424, "porth": 37425, "draf": 37426, "trailing": 37427, "margate": 37428, "elife": 37429, "downward": 37430, "linn": 37431, "glades": 37432, "girlpower": 37433, "akrish": 37434, "uki": 37435, "ronda": 37436, "tsc": 37437, "appreciationday": 37438, "vising": 37439, "loom": 37440, "ðŁį³": 37441, "mexican": 37442, "argos": 37443, "yya": 37444, "jadine": 37445, "southport": 37446, "dend": 37447, "sista": 37448, "redeem": 37449, "meng": 37450, "braxton": 37451, "antioxidant": 37452, "skey": 37453, "mpg": 37454, "finding": 37455, "vibration": 37456, "ceu": 37457, "khart": 37458, "dimini": 37459, "cline": 37460, "shelly": 37461, "hines": 37462, "īï¸ı": 37463, "topical": 37464, "nover": 37465, "maxx": 37466, "primitive": 37467, "illustrate": 37468, "bounds": 37469, "trenton": 37470, "jointly": 37471, "breeders": 37472, "uchi": 37473, "wakeupamerica": 37474, "bada": 37475, "ðŁĹ£ï¸ı": 37476, "guacam": 37477, "spheres": 37478, "peregr": 37479, "youthful": 37480, "lolo": 37481, "birmin": 37482, "tly": 37483, "jeremycorbyn": 37484, "defects": 37485, "cosm": 37486, "arent": 37487, "vaa": 37488, "bagels": 37489, "mediac": 37490, "coriander": 37491, "icago": 37492, "ghaz": 37493, "abbas": 37494, "remodel": 37495, "structuring": 37496, "pum": 37497, "outlaw": 37498, "adani": 37499, "rbc": 37500, "gulls": 37501, "nli": 37502, "confuse": 37503, "ðŁijĩðŁı¼": 37504, "vila": 37505, "mcnamara": 37506, "corrections": 37507, "mughal": 37508, "seri": 37509, "regain": 37510, "ssb": 37511, "leave": 37512, "hahahah": 37513, "grande": 37514, "distressed": 37515, "rechargeable": 37516, "hoa": 37517, "housed": 37518, "stil": 37519, "attributed": 37520, "opathic": 37521, "dips": 37522, "prit": 37523, "headphone": 37524, "conclude": 37525, "pilo": 37526, "het": 37527, "utsa": 37528, "nitin": 37529, 
"jem": 37530, "snippet": 37531, "tutoring": 37532, "oper": 37533, "sunk": 37534, "ensla": 37535, "chau": 37536, "acorn": 37537, "quintess": 37538, "rankin": 37539, "affiliated": 37540, "ourlives": 37541, "clint": 37542, "seater": 37543, "isaac": 37544, "bashing": 37545, "smear": 37546, "nurse": 37547, "doodling": 37548, "\";": 37549, "saku": 37550, "atrocities": 37551, "imam": 37552, "gfs": 37553, "violating": 37554, "commend": 37555, "bradshaw": 37556, "erville": 37557, "billed": 37558, "bbe": 37559, "thulhu": 37560, "iphones": 37561, "moose": 37562, "dios": 37563, "rew": 37564, "methane": 37565, "strangely": 37566, "whisky": 37567, "tightly": 37568, "spielberg": 37569, "radius": 37570, "noticing": 37571, "wif": 37572, "ignati": 37573, "ifa": 37574, "apis": 37575, "wali": 37576, "haitian": 37577, "bushes": 37578, "yz": 37579, "vl": 37580, "exited": 37581, "assel": 37582, "truec": 37583, "domen": 37584, "asher": 37585, "inking": 37586, "newyearseve": 37587, "hendricks": 37588, "bati": 37589, "ìĿ“ì": 37590, "richter": 37591, "monsanto": 37592, "conline": 37593, "agreat": 37594, "ðŁ¤¯": 37595, "masterpieces": 37596, "arn": 37597, "roughs": 37598, "cleve": 37599, "sev": 37600, "fashions": 37601, "toya": 37602, "shail": 37603, "copeland": 37604, "aquari": 37605, "decals": 37606, "areyou": 37607, "yaya": 37608, "astr": 37609, "font": 37610, "mlm": 37611, "arca": 37612, "ppor": 37613, "pollock": 37614, "xperia": 37615, "conservation": 37616, "chainsaw": 37617, "aggie": 37618, "?!?!?": 37619, "sile": 37620, "shon": 37621, "ìĹIJ": 37622, "notebooks": 37623, "marquette": 37624, "deus": 37625, "bbled": 37626, "spicer": 37627, "mccabe": 37628, "norwich": 37629, "modification": 37630, "boosted": 37631, "strum": 37632, "salesman": 37633, "bangle": 37634, "nissan": 37635, "hezbollah": 37636, "breasts": 37637, "aaf": 37638, "anthus": 37639, "sker": 37640, "owed": 37641, "heros": 37642, "gifs": 37643, "fosters": 37644, "eaters": 37645, "dues": 37646, "_/": 37647, "lymphoma": 
37648, "sfam": 37649, "megal": 37650, "afridi": 37651, "agic": 37652, "pamp": 37653, "jealousy": 37654, "ðŁijĮðŁı¼": 37655, "calculate": 37656, "napping": 37657, "gale": 37658, "ð٦Ħ": 37659, "lubbock": 37660, "assumed": 37661, "renting": 37662, "íĄľ": 37663, "suburb": 37664, "ãĤ·": 37665, "technic": 37666, "ucla": 37667, "infront": 37668, "garnet": 37669, "steroids": 37670, "striving": 37671, "howar": 37672, "mover": 37673, "leton": 37674, "bulldo": 37675, "isin": 37676, "ciao": 37677, "snz": 37678, "forefront": 37679, "dams": 37680, "midwife": 37681, "mawards": 37682, "clapton": 37683, "wein": 37684, "subsidies": 37685, "sproud": 37686, "rotherham": 37687, "phantom": 37688, "arach": 37689, "spiel": 37690, "racket": 37691, "selamat": 37692, "noon": 37693, "lbc": 37694, "entially": 37695, "ðŁē¸": 37696, "silve": 37697, "moud": 37698, "kinetic": 37699, "yasi": 37700, "ðŁİ©": 37701, "ool": 37702, "miku": 37703, "iza": 37704, "fera": 37705, "floren": 37706, "barbershop": 37707, "groot": 37708, "zest": 37709, "nears": 37710, "stanis": 37711, "zand": 37712, "policeman": 37713, "jurisdic": 37714, "formations": 37715, "apparatus": 37716, "spd": 37717, "artifact": 37718, "tosc": 37719, "motivating": 37720, "womancrush": 37721, "redro": 37722, "diagnostics": 37723, "raza": 37724, "outfitters": 37725, "elxn": 37726, "dodgy": 37727, "ryn": 37728, "shd": 37729, "orthodon": 37730, "olde": 37731, "jayanti": 37732, "balances": 37733, "quickest": 37734, "canton": 37735, "fridayreads": 37736, "!*": 37737, "naa": 37738, "aak": 37739, "ðŁĶ·": 37740, "behaviors": 37741, "raspberries": 37742, "ä»": 37743, "political": 37744, "camil": 37745, "Äľ": 37746, "dik": 37747, "astounding": 37748, "liebe": 37749, "novelty": 37750, "turmoil": 37751, "sully": 37752, "springbreak": 37753, "honouring": 37754, "ccg": 37755, "ðŁıē": 37756, "mylittle": 37757, "kyc": 37758, "proms": 37759, "ðŁķĬ": 37760, "è": 37761, "bige": 37762, "avril": 37763, "ðŁĩµðŁĩ°": 37764, "marion": 37765, "asants": 37766, 
"surya": 37767, "octag": 37768, "lufthan": 37769, "acron": 37770, "fayetteville": 37771, "tique": 37772, "loves": 37773, "enca": 37774, "dekalb": 37775, "taver": 37776, "devote": 37777, "auxiliary": 37778, "johannes": 37779, "treadmill": 37780, "ayan": 37781, "qur": 37782, "donaldson": 37783, "cheryl": 37784, "\"....": 37785, "sven": 37786, "kirsty": 37787, "gunners": 37788, "radish": 37789, "oahu": 37790, "vsky": 37791, "ible": 37792, "concourse": 37793, "bps": 37794, "eloqu": 37795, "ashford": 37796, "tebow": 37797, "roblox": 37798, "mada": 37799, "driving": 37800, "thday": 37801, "sproject": 37802, "mms": 37803, "banded": 37804, ".!!": 37805, "librarians": 37806, "flannel": 37807, "intolerance": 37808, "heral": 37809, "çµ": 37810, "nemesis": 37811, "lista": 37812, "tarak": 37813, "crypt": 37814, "starplus": 37815, "vishnu": 37816, "scale": 37817, "cris": 37818, "%),": 37819, "jillian": 37820, "reggae": 37821, "pegasus": 37822, "olin": 37823, "ipment": 37824, "manic": 37825, "lfc": 37826, "goddard": 37827, "iteam": 37828, "parlour": 37829, "anchors": 37830, "leeminho": 37831, "tallahassee": 37832, "antit": 37833, "dho": 37834, "kidney": 37835, "yash": 37836, "battled": 37837, "azad": 37838, "garis": 37839, "faulkner": 37840, "sniff": 37841, "paparazzi": 37842, "edm": 37843, "phyllis": 37844, "contested": 37845, "aaay": 37846, "seca": 37847, "kton": 37848, "velve": 37849, "rainier": 37850, "forum": 37851, "tampab": 37852, "hosp": 37853, "tractors": 37854, "oxfordshire": 37855, "notion": 37856, "guangzhou": 37857, "ðŁĺ¯": 37858, "refill": 37859, "wednesdaymotivation": 37860, "slider": 37861, "mukherjee": 37862, "pratt": 37863, "fontaine": 37864, "alphon": 37865, "afar": 37866, "tsi": 37867, "pesticides": 37868, "fiends": 37869, "mocking": 37870, "braw": 37871, "transat": 37872, "doses": 37873, "cores": 37874, "homophobia": 37875, "documenting": 37876, "zlatan": 37877, "condoms": 37878, "sé": 37879, "sunset": 37880, "kunst": 37881, "tonga": 37882, "Ć ĀøĀŖ": 37883, 
"vation": 37884, "spray": 37885, "chowder": 37886, "raps": 37887, "palladium": 37888, "norwood": 37889, "musichistory": 37890, "hooker": 37891, "sisi": 37892, "osprey": 37893, "phys": 37894, "conceded": 37895, "bobcat": 37896, "armad": 37897, "zeit": 37898, "ƙĦ": 37899, "ðŁĺģðŁĺģ": 37900, "meridi": 37901, "ðŁĩ·ðŁĩº": 37902, "cornwall": 37903, "!),": 37904, "touchdowns": 37905, "zeit": 37906, "chalet": 37907, "mmm": 37908, "alche": 37909, "gorilla": 37910, "foss": 37911, "atiku": 37912, "luminous": 37913, "ivanka": 37914, "beek": 37915, "stares": 37916, "swiss": 37917, "âĿ¤âĿ¤âĿ¤âĿ¤": 37918, "scrubs": 37919, "meath": 37920, "gustav": 37921, "jogging": 37922, "confetti": 37923, "asos": 37924, "ersfc": 37925, "breitbart": 37926, "applicable": 37927, "authored": 37928, "yaho": 37929, "hin": 37930, "displacement": 37931, "jv": 37932, "ðŁĮ¹ðŁĮ¹": 37933, "otc": 37934, "nonprofits": 37935, "diecast": 37936, "gusto": 37937, "intestin": 37938, "cages": 37939, "meen": 37940, "lukas": 37941, "mooney": 37942, "ðŁĺ·": 37943, "veryday": 37944, "torah": 37945, "ission": 37946, "wac": 37947, "leveraging": 37948, "ishable": 37949, "cuse": 37950, "lewood": 37951, "mayan": 37952, "turntable": 37953, "juice": 37954, "trusty": 37955, "tup": 37956, "etiquette": 37957, "supervisors": 37958, "stun": 37959, "guzman": 37960, "conferen": 37961, "rico": 37962, "feast": 37963, "backward": 37964, "polaris": 37965, "miche": 37966, "jog": 37967, "hing": 37968, "fieldhouse": 37969, "veling": 37970, "shocker": 37971, "escence": 37972, "ा": 37973, "vibe": 37974, "anastasia": 37975, "marched": 37976, "killing": 37977, "Ķƫ": 37978, "fett": 37979, "exoplan": 37980, "...(": 37981, "snowday": 37982, "loh": 37983, "irani": 37984, "lakhs": 37985, "dela": 37986, "pocaly": 37987, "boomers": 37988, "dictatorship": 37989, "acer": 37990, "turkeys": 37991, "quarterfinal": 37992, "musketeers": 37993, "ðŁēĽðŁēļ": 37994, "sfx": 37995, "museumweek": 37996, "scala": 37997, "risis": 37998, "(ðŁĵ·": 37999, "ãĢĤ": 38000, 
"zies": 38001, "boeh": 38002, "hues": 38003, "lusci": 38004, "dola": 38005, "impeachtrump": 38006, "rood": 38007, "doncaster": 38008, "torre": 38009, "heroes": 38010, "foyer": 38011, "tari": 38012, "blurred": 38013, "kew": 38014, "frankly": 38015, "droid": 38016, "apal": 38017, "м": 38018, "yaf": 38019, "bret": 38020, "paragu": 38021, "cacao": 38022, "ðŁĻĮðŁı¾": 38023, "rue": 38024, "headaches": 38025, "shawty": 38026, "charley": 38027, "paler": 38028, "gowns": 38029, "correctional": 38030, "ðŁĺ©ðŁĺ©": 38031, "breakingbad": 38032, "oling": 38033, "dap": 38034, "endeavour": 38035, "citadel": 38036, "trad": 38037, "incumbent": 38038, "meditate": 38039, "footed": 38040, "ðŁēµ": 38041, "shabbat": 38042, "dayofthe": 38043, "willem": 38044, "galway": 38045, "tored": 38046, "marriage": 38047, "fillion": 38048, "sleeveless": 38049, "auditor": 38050, "jinyoung": 38051, "invincible": 38052, "kaduna": 38053, "aand": 38054, "volcanoes": 38055, "moneti": 38056, "indiegogo": 38057, "buccaneers": 38058, "ðŁijīðŁı½": 38059, "ãĢĤ": 38060, "layton": 38061, "cuckoo": 38062, "humber": 38063, "buzzer": 38064, "ĆÄ«": 38065, "tore": 38066, "strains": 38067, "stom": 38068, "paine": 38069, "swe": 38070, "duff": 38071, "zou": 38072, "simi": 38073, "lipp": 38074, "urn": 38075, "seagu": 38076, "ðŁĶ®": 38077, "sundae": 38078, "hic": 38079, "ðŁĺ¨": 38080, "bullpen": 38081, "uper": 38082, "flyover": 38083, "aldridge": 38084, "globes": 38085, "alies": 38086, "kenzie": 38087, "gees": 38088, "ycle": 38089, "splin": 38090, "magenta": 38091, "jha": 38092, "balu": 38093, "ghorn": 38094, "tipper": 38095, "wicker": 38096, "tasteof": 38097, "conclave": 38098, "chale": 38099, "invasi": 38100, "cater": 38101, "dioxide": 38102, "megab": 38103, "winn": 38104, "atp": 38105, "transformative": 38106, "nestled": 38107, "hig": 38108, "bridging": 38109, "lilies": 38110, "cheered": 38111, "baddest": 38112, "scrolls": 38113, "realis": 38114, "diplo": 38115, "ðŁĶ«": 38116, "concession": 38117, "preferences": 38118, 
"explodes": 38119, "ergon": 38120, "introductory": 38121, "ineau": 38122, "chaf": 38123, "somes": 38124, "landrover": 38125, "spiration": 38126, "sexy": 38127, "scorecard": 38128, "illustrates": 38129, "soulmate": 38130, "wien": 38131, "interdisciplinary": 38132, "forecasting": 38133, "entities": 38134, "glued": 38135, "enlar": 38136, "curt": 38137, "perceptions": 38138, "bootleg": 38139, "mire": 38140, "ashok": 38141, "vaz": 38142, "horne": 38143, "calle": 38144, "aculture": 38145, "theroy": 38146, "nighttime": 38147, "ocal": 38148, "characterdesign": 38149, "armist": 38150, "ðŁĺıðŁĺı": 38151, "yahoo": 38152, "aceae": 38153, "tose": 38154, "evento": 38155, "sout": 38156, "nayanth": 38157, "whom": 38158, "vare": 38159, "rigging": 38160, "genus": 38161, "hive": 38162, "commands": 38163, "stie": 38164, "daya": 38165, "ethanol": 38166, "enf": 38167, "hifi": 38168, "fluence": 38169, "clemson": 38170, "reinvent": 38171, "thermometer": 38172, "humorous": 38173, "emerging": 38174, "ación": 38175, "ðŁĺĺðŁĺį": 38176, "sity": 38177, "hawke": 38178, "accompanying": 38179, "tility": 38180, "ðŁĺª": 38181, "recess": 38182, "protagonist": 38183, "lery": 38184, "dundal": 38185, "intl": 38186, "brittany": 38187, "qbs": 38188, "offthe": 38189, "marriages": 38190, "howto": 38191, "violated": 38192, "adelaide": 38193, "witt": 38194, "lancer": 38195, "pakv": 38196, "hume": 38197, "stade": 38198, "bragging": 38199, "outright": 38200, "adc": 38201, "superst": 38202, "realtime": 38203, "cures": 38204, "gardeners": 38205, "erock": 38206, "dalejr": 38207, "vero": 38208, "bartol": 38209, "moti": 38210, "mcfly": 38211, "vpn": 38212, "stink": 38213, "overrated": 38214, "guerra": 38215, "etis": 38216, "athome": 38217, "twdfamily": 38218, "thab": 38219, "tnx": 38220, "rafael": 38221, "familytravel": 38222, "xley": 38223, "satanic": 38224, "equations": 38225, "rudy": 38226, "waldorf": 38227, "stani": 38228, "tube": 38229, "measles": 38230, "zimmerman": 38231, "obligations": 38232, "iously": 
38233, "bowser": 38234, "transformer": 38235, "shoppe": 38236, "shaken": 38237, "ghouse": 38238, "tod": 38239, "ketball": 38240, "shareholder": 38241, "marca": 38242, "kpmg": 38243, "akan": 38244, "givenchy": 38245, "coastal": 38246, "auth": 38247, "rollercoaster": 38248, "marches": 38249, "coordinate": 38250, "cinema": 38251, "apprentices": 38252, "parlor": 38253, "mito": 38254, "menon": 38255, "considerable": 38256, "barre": 38257, "gloss": 38258, "enhances": 38259, "jazeera": 38260, "falmouth": 38261, "thrash": 38262, "staten": 38263, "kzn": 38264, "engel": 38265, "samanthap": 38266, "floppy": 38267, "salom": 38268, "ðŁıĨðŁıĨ": 38269, "wack": 38270, "deliberate": 38271, "oscill": 38272, "heritag": 38273, "dusted": 38274, "ornithology": 38275, "paddle": 38276, "ferns": 38277, "barun": 38278, "clans": 38279, "anticipate": 38280, "aay": 38281, "matically": 38282, "Ʃĩ": 38283, "tumble": 38284, "postman": 38285, "unicef": 38286, "trotter": 38287, "opd": 38288, "leaflet": 38289, "geist": 38290, "ceasefire": 38291, "screws": 38292, "creation": 38293, "walnuts": 38294, "longhorns": 38295, "understatement": 38296, "abb": 38297, "proximity": 38298, "nax": 38299, "unity": 38300, "turnpike": 38301, "ordained": 38302, "dubstep": 38303, "chakra": 38304, "mech": 38305, "loveher": 38306, "lookalike": 38307, "donnein": 38308, "viron": 38309, "ÙĪ": 38310, "bangers": 38311, "variants": 38312, "outdated": 38313, "inta": 38314, "cristo": 38315, "spelt": 38316, "foodand": 38317, "fon": 38318, "stefani": 38319, "marginal": 38320, "hutton": 38321, "tiara": 38322, "telford": 38323, "quen": 38324, "fairgrounds": 38325, "quetta": 38326, "mikhail": 38327, "healer": 38328, "vball": 38329, "tyre": 38330, "undergrad": 38331, "glend": 38332, "homers": 38333, "scribed": 38334, "maintains": 38335, "poche": 38336, "missal": 38337, "marko": 38338, "uas": 38339, "Ôn": 38340, "shp": 38341, "convey": 38342, "padre": 38343, "saba": 38344, "puglia": 38345, "madhuri": 38346, "paxton": 38347, "chaplain": 
38348, "nago": 38349, "casi": 38350, "...!!!": 38351, "flirt": 38352, "saleh": 38353, "kare": 38354, "dire": 38355, "stamped": 38356, "extreme": 38357, "ðŁĺĄðŁĺĄ": 38358, "hoppy": 38359, "guadalupe": 38360, "advantaged": 38361, "euchar": 38362, "plow": 38363, "unn": 38364, "macqu": 38365, "portland": 38366, "clash": 38367, "pes": 38368, "loubout": 38369, "yp": 38370, "keeping": 38371, "arcadia": 38372, "frankie": 38373, "fiu": 38374, "deth": 38375, "encyclopedia": 38376, "size": 38377, "invests": 38378, "ðŁį©": 38379, "geological": 38380, "franç": 38381, "confront": 38382, "ðŁĺ„": 38383, "dys": 38384, "afm": 38385, "texan": 38386, "graphene": 38387, "repostapp": 38388, "acf": 38389, "ursula": 38390, "gaza": 38391, "ddled": 38392, "fum": 38393, "wsbtv": 38394, "mbe": 38395, "frontiers": 38396, "chronograph": 38397, "kes": 38398, "interfaith": 38399, "taboo": 38400, "sparta": 38401, "wondo": 38402, "florist": 38403, "embraces": 38404, "caw": 38405, "noel": 38406, "archers": 38407, "ðŁIJ·": 38408, "romano": 38409, "banan": 38410, "shakers": 38411, "melodies": 38412, "geothermal": 38413, "sephora": 38414, "ìļ°": 38415, "оГ": 38416, "proc": 38417, "handshake": 38418, "pande": 38419, "populated": 38420, "slowdown": 38421, "hortons": 38422, "registrations": 38423, "undeni": 38424, "lants": 38425, "passover": 38426, "thakur": 38427, "lief": 38428, "adhesive": 38429, "petal": 38430, "microscopy": 38431, "memphis": 38432, "confirming": 38433, "airdrop": 38434, "mesmer": 38435, "perceived": 38436, "mingle": 38437, "lifeline": 38438, "ghj": 38439, "worcestershire": 38440, "passions": 38441, "acher": 38442, "ellar": 38443, "aho": 38444, "firenze": 38445, "barang": 38446, "letterman": 38447, "hatfield": 38448, "lucha": 38449, "jeter": 38450, "eshop": 38451, "williams": 38452, "horoscope": 38453, "prede": 38454, "eastbourne": 38455, "durga": 38456, "diversion": 38457, "altrin": 38458, "seismic": 38459, "premiosm": 38460, "narco": 38461, "tir": 38462, "orig": 38463, "orm": 38464, 
"landfall": 38465, "cious": 38466, "lindo": 38467, "maxine": 38468, "xico": 38469, "tray": 38470, "oswald": 38471, "cba": 38472, "ricotta": 38473, "ncr": 38474, "marau": 38475, "า": 38476, "gladiator": 38477, "chery": 38478, "lung": 38479, "ume": 38480, "popsic": 38481, "longing": 38482, "canals": 38483, "taya": 38484, "decentralized": 38485, "shopp": 38486, "pressures": 38487, "maharaj": 38488, "etihad": 38489, "walgreens": 38490, "succession": 38491, "signaling": 38492, "lig": 38493, "staffer": 38494, "northkorea": 38495, "defying": 38496, "asma": 38497, "deg": 38498, "perimeter": 38499, "oakville": 38500, "msk": 38501, "baltimore": 38502, "receip": 38503, "deple": 38504, "ðŁĺŃðŁĺĤ": 38505, "jamboree": 38506, ">.<": 38507, "rspb": 38508, "punisher": 38509, "considerably": 38510, "intothe": 38511, "parisian": 38512, "accelerated": 38513, "polyester": 38514, "lowes": 38515, "frying": 38516, "sautéed": 38517, "mouths": 38518, "seychelles": 38519, "rax": 38520, "godis": 38521, "dakota": 38522, "housewives": 38523, "theme": 38524, "matinee": 38525, "blackbird": 38526, "yesung": 38527, "prefers": 38528, "pellegr": 38529, "inated": 38530, "trunks": 38531, "strongertogether": 38532, "repet": 38533, "repairing": 38534, "pedals": 38535, "tolerant": 38536, "herr": 38537, "dunne": 38538, "indication": 38539, "decatur": 38540, "btv": 38541, "exhibitors": 38542, "ikon": 38543, "fridaymotivation": 38544, "bragg": 38545, "livetweet": 38546, "alves": 38547, "womensart": 38548, "foreigners": 38549, "wallets": 38550, "mindy": 38551, "laney": 38552, "bbin": 38553, "tvmiaw": 38554, "lifter": 38555, "target": 38556, "tame": 38557, "drou": 38558, "astrophotography": 38559, "mpc": 38560, "gpu": 38561, "nordstrom": 38562, "friction": 38563, "runoff": 38564, "lovable": 38565, "spnfamily": 38566, "extingui": 38567, "bloody": 38568, "schel": 38569, "artistry": 38570, "swish": 38571, "scarce": 38572, "phils": 38573, "maxim": 38574, "possum": 38575, "compromised": 38576, "styli": 38577, 
"scfc": 38578, "issa": 38579, "birmingham": 38580, "sketched": 38581, "angelica": 38582, "ordinance": 38583, "jets": 38584, "conquer": 38585, "ðŁĺIJ": 38586, "onlineshopping": 38587, "sori": 38588, "reasonably": 38589, "nuestro": 38590, "arturo": 38591, "chl": 38592, "benefici": 38593, "sphoto": 38594, "welt": 38595, "nikk": 38596, "ð٤ŀ": 38597, "danao": 38598, "formid": 38599, "asse": 38600, "afirst": 38601, "âľĤ": 38602, "gillette": 38603, "assor": 38604, "anonym": 38605, "selca": 38606, "femi": 38607, "bearable": 38608, "yand": 38609, "armory": 38610, "crepe": 38611, "celticfc": 38612, "bravo": 38613, "inexpensive": 38614, "delec": 38615, "gecko": 38616, "newmarket": 38617, "snowflakes": 38618, "kabir": 38619, "contra": 38620, "canning": 38621, "morpho": 38622, "garwal": 38623, "ðŁēĄðŁı»": 38624, "fighting": 38625, "mutation": 38626, "woody": 38627, "jugg": 38628, "graces": 38629, "premiosmtvmiaw": 38630, "kennedy": 38631, "gup": 38632, "sae": 38633, "opha": 38634, "offspring": 38635, "finisher": 38636, "betts": 38637, "spanning": 38638, "marj": 38639, "hone": 38640, "shing": 38641, "continents": 38642, "samanthaprabhu": 38643, "unrelated": 38644, "lacy": 38645, "explosions": 38646, "benjamin": 38647, "sophie": 38648, "noting": 38649, "microsoft": 38650, "assen": 38651, "ahoy": 38652, "iker": 38653, "hofer": 38654, "moe": 38655, "ahmadi": 38656, "yann": 38657, "anak": 38658, "mahi": 38659, "beu": 38660, "ahah": 38661, "creeper": 38662, "baahubali": 38663, "amat": 38664, "priory": 38665, "hawkeye": 38666, "deloitte": 38667, "skoda": 38668, "printmaking": 38669, "assembling": 38670, "miraculous": 38671, "noch": 38672, "swo": 38673, "lega": 38674, "operates": 38675, "borderlands": 38676, "elie": 38677, "strongh": 38678, "reptiles": 38679, "pirate": 38680, "unfold": 38681, "¯": 38682, "qualcomm": 38683, "unpredictable": 38684, "otr": 38685, "rosewood": 38686, "directional": 38687, "counselors": 38688, "cornell": 38689, "liberated": 38690, "jad": 38691, "irregular": 
38692, "bulgarian": 38693, "highness": 38694, "vodafone": 38695, "swild": 38696, "minimize": 38697, "grazie": 38698, "à¹ĩ": 38699, "rstats": 38700, "streep": 38701, "ometric": 38702, "humble": 38703, "lump": 38704, "lille": 38705, "bü": 38706, "homedepot": 38707, "tripadvisor": 38708, "kiwan": 38709, "avia": 38710, "erz": 38711, "exico": 38712, "duf": 38713, "blumen": 38714, "mizing": 38715, "arma": 38716, "inim": 38717, "constan": 38718, "sora": 38719, "jual": 38720, "aun": 38721, "twell": 38722, "trenches": 38723, "hera": 38724, "rk": 38725, "poplar": 38726, "recipeoftheday": 38727, "llan": 38728, "bhuban": 38729, "shortages": 38730, "ingdon": 38731, "bridgewater": 38732, "ðŁIJĺ": 38733, "fortnite": 38734, "camden": 38735, "uncture": 38736, "prow": 38737, "colonies": 38738, "tks": 38739, "ngo": 38740, "bhm": 38741, "livepd": 38742, "splace": 38743, "slike": 38744, "happyeaster": 38745, "terrence": 38746, "revolver": 38747, "jed": 38748, "yyyy": 38749, "officeof": 38750, "mts": 38751, "existential": 38752, "rourke": 38753, "explorebc": 38754, "ssed": 38755, "priest": 38756, "vixen": 38757, "siding": 38758, "kpa": 38759, "ahar": 38760, "juic": 38761, "obstruc": 38762, "forensics": 38763, "ukmfg": 38764, "cancellation": 38765, "weary": 38766, "abq": 38767, "elec": 38768, "prized": 38769, "debts": 38770, "mezz": 38771, "salvatore": 38772, "mdc": 38773, "grette": 38774, "cgc": 38775, "thon": 38776, "snowstorm": 38777, "tsch": 38778, "cookery": 38779, "Ĺ": 38780, "waxing": 38781, "nacional": 38782, "murs": 38783, "rave": 38784, "capes": 38785, "germain": 38786, "dripping": 38787, "submitting": 38788, "omelette": 38789, "iteration": 38790, "ajes": 38791, "shimmer": 38792, "fueling": 38793, "ðŁĩ§ðŁĩª": 38794, "lipo": 38795, "bobble": 38796, "unfollow": 38797, "islamist": 38798, "hiber": 38799, "cats": 38800, "agentsofshield": 38801, "sensi": 38802, "_____": 38803, "steria": 38804, "instal": 38805, "auspicious": 38806, "harrow": 38807, "overland": 38808, "feminists": 
38809, "instant": 38810, "chariot": 38811, "blindness": 38812, "sped": 38813, "scarec": 38814, "nuit": 38815, "miniatures": 38816, "hoseok": 38817, "glock": 38818, "fifaworldcup": 38819, "ete": 38820, "dism": 38821, "weiner": 38822, "exfoli": 38823, "earts": 38824, "à¸Ķ": 38825, "myart": 38826, "manil": 38827, "issant": 38828, "forma": 38829, "incu": 38830, "buffalob": 38831, "intim": 38832, "mccul": 38833, "anjali": 38834, "popo": 38835, "undoub": 38836, "hila": 38837, "fungal": 38838, "thankful": 38839, "futur": 38840, "endish": 38841, "rends": 38842, "thar": 38843, "sheff": 38844, "ringo": 38845, "nicholls": 38846, "iowa": 38847, "potom": 38848, "clams": 38849, "ãģĦ": 38850, "aconf": 38851, "stadiums": 38852, "dimp": 38853, "dik": 38854, "residences": 38855, "dov": 38856, "caricature": 38857, "seagull": 38858, "klm": 38859, "confess": 38860, "slapped": 38861, "celeb": 38862, "turbines": 38863, "ppv": 38864, "nurture": 38865, "elab": 38866, ".....#": 38867, "tuff": 38868, "depress": 38869, "alfar": 38870, "amiibo": 38871, "dispon": 38872, "ewing": 38873, "queer": 38874, "friends": 38875, "forre": 38876, "âĺ¼": 38877, "swt": 38878, "aquarius": 38879, "headliner": 38880, "curd": 38881, "figs": 38882, "otters": 38883, "lovefl": 38884, "kareem": 38885, "govegan": 38886, "friyay": 38887, "consolation": 38888, "atri": 38889, "ì§Ħ": 38890, "âĺĿï¸ı": 38891, "polyne": 38892, "gued": 38893, "oya": 38894, "laus": 38895, "intestinal": 38896, "camilla": 38897, "scalp": 38898, "pir": 38899, "leeds": 38900, "horrifying": 38901, "boretum": 38902, "dandelion": 38903, "ferrer": 38904, "ellic": 38905, "asx": 38906, "soren": 38907, "reloaded": 38908, "aleague": 38909, "navigator": 38910, "inette": 38911, "addams": 38912, "alchemist": 38913, "akshay": 38914, "dystopian": 38915, "awec": 38916, "naya": 38917, "alisa": 38918, "ailed": 38919, "agor": 38920, "aviator": 38921, "alizer": 38922, "smobile": 38923, "findyourpark": 38924, "copying": 38925, "toddy": 38926, "shti": 38927, 
"monger": 38928, "calhoun": 38929, "napkin": 38930, "breakup": 38931, "yatra": 38932, "sethu": 38933, "richi": 38934, "erasmus": 38935, "ferry": 38936, "amore": 38937, "practise": 38938, "bobo": 38939, "powerpoint": 38940, "oose": 38941, "liffe": 38942, "china": 38943, "shka": 38944, "fadnavis": 38945, "duane": 38946, "waron": 38947, "false": 38948, "ðŁļĤ": 38949, "washes": 38950, "discip": 38951, "========": 38952, "gk": 38953, "abb": 38954, "stubborn": 38955, "medieval": 38956, "pci": 38957, "ðŁįª": 38958, "marilyn": 38959, "hyo": 38960, "mandi": 38961, "cri": 38962, "predecess": 38963, "continuation": 38964, "omusic": 38965, "slat": 38966, "whal": 38967, "mallory": 38968, "bonn": 38969, "shenzhen": 38970, "cai": 38971, "âĺĄ": 38972, "safest": 38973, "forwards": 38974, "drawers": 38975, "blasted": 38976, "slee": 38977, "morphe": 38978, "mbta": 38979, "dumbass": 38980, "ÑĦоÑĤо": 38981, "alhamdulillah": 38982, "eclub": 38983, "albeit": 38984, "healey": 38985, "ayurveda": 38986, "advertised": 38987, "crocs": 38988, "ittles": 38989, "bryson": 38990, "bei": 38991, "njpw": 38992, "honoree": 38993, "fused": 38994, "ðŁĶĺ": 38995, "multin": 38996, "naga": 38997, "departs": 38998, "kop": 38999, "kino": 39000, "jharkhand": 39001, "edna": 39002, "axle": 39003, "milton": 39004, "supremacist": 39005, "marrakech": 39006, "dominic": 39007, "transcript": 39008, "][#": 39009, ":).": 39010, "woc": 39011, "surrounds": 39012, "ogil": 39013, "leaflets": 39014, "cowell": 39015, "whew": 39016, "trude": 39017, "prolifer": 39018, "succes": 39019, "sportsman": 39020, "condom": 39021, "poche": 39022, "kup": 39023, "imprisonment": 39024, "{}": 39025, "scrambled": 39026, "ÄĽ": 39027, "kaine": 39028, "cellphone": 39029, "metamor": 39030, "coni": 39031, "remnants": 39032, "eez": 39033, "downpour": 39034, "afternoon": 39035, "exercising": 39036, "berser": 39037, "architecture": 39038, "wicklow": 39039, "mns": 39040, "isp": 39041, "boc": 39042, "niss": 39043, "mnwild": 39044, "stumble": 39045, 
"rsi": 39046, "luffy": 39047, "silen": 39048, "ddad": 39049, "bullies": 39050, "hawker": 39051, "bbcc": 39052, "scuba": 39053, "epp": 39054, "quets": 39055, "foraging": 39056, "pallet": 39057, "hadi": 39058, "cinematographer": 39059, "catchers": 39060, "toaster": 39061, "khi": 39062, "litecoin": 39063, "kidlit": 39064, "amherst": 39065, "mauricio": 39066, "ipad": 39067, "marmalade": 39068, "fey": 39069, "donnelly": 39070, "gto": 39071, "estas": 39072, "cerebral": 39073, "antgrasso": 39074, "zzled": 39075, "virgil": 39076, "swapped": 39077, "ðŁĺħðŁĺħ": 39078, "nodapl": 39079, "greatest": 39080, "nhlbruins": 39081, "fraser": 39082, "bmo": 39083, "anew": 39084, ".âĿ¤ï¸ı": 39085, "segregation": 39086, "remarkably": 39087, "mccormick": 39088, "logger": 39089, "eras": 39090, "contracting": 39091, "âłĢâłĢ": 39092, "yorks": 39093, "ukulele": 39094, "touchscreen": 39095, "decked": 39096, "benn": 39097, "southwark": 39098, "ravin": 39099, "numis": 39100, "ð٤Ļ": 39101, "rut": 39102, "greco": 39103, "ethic": 39104, "redneck": 39105, "arr": 39106, "tcs": 39107, "ihri": 39108, "ðŁĩ«ðŁĩ·": 39109, "lk": 39110, "inherited": 39111, "zyk": 39112, "viaduct": 39113, "martyred": 39114, "higu": 39115, "ssn": 39116, "bein": 39117, "streetstyle": 39118, "fergie": 39119, "bankof": 39120, "æĹ„": 39121, "stakeholder": 39122, "exemplary": 39123, "cress": 39124, "essa": 39125, "erotica": 39126, "intrepid": 39127, "gomes": 39128, "braun": 39129, "bethany": 39130, "bangtan": 39131, "pulmonary": 39132, "milling": 39133, "doctorate": 39134, "trumprussia": 39135, "र": 39136, "sani": 39137, "blatt": 39138, "plau": 39139, "deprived": 39140, "tle": 39141, "fully": 39142, "bourn": 39143, "stak": 39144, "lufthansa": 39145, "kiosk": 39146, "faroo": 39147, "defy": 39148, "badan": 39149, "ðŁĺĺâĿ¤ï¸ı": 39150, "ritz": 39151, "trisha": 39152, "rands": 39153, "middlesex": 39154, "arabs": 39155, "proj": 39156, "sportscenter": 39157, "repeats": 39158, "ivf": 39159, "bleedblue": 39160, "assure": 39161, "obs": 
39162, "territorial": 39163, "elen": 39164, "beverley": 39165, "annah": 39166, "âĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ıâĿ¤ï¸ı": 39167, "zl": 39168, "forgood": 39169, "sciencefiction": 39170, "glau": 39171, "sonya": 39172, "prith": 39173, "stweets": 39174, "mixers": 39175, "mario": 39176, "antelope": 39177, "writingcommunity": 39178, "wentz": 39179, "denham": 39180, "bedi": 39181, "sfo": 39182, "harleydavidson": 39183, "lookbook": 39184, "immunotherapy": 39185, "orphe": 39186, "esville": 39187, "edged": 39188, "task": 39189, "sbball": 39190, "corrosion": 39191, "kilometers": 39192, "costing": 39193, "playback": 39194, "keke": 39195, "divisi": 39196, "uter": 39197, "relocation": 39198, "yelled": 39199, "peng": 39200, "upbeat": 39201, "serve": 39202, "âļł": 39203, "halen": 39204, "stirring": 39205, "rehman": 39206, "env": 39207, "schumacher": 39208, "fragment": 39209, "alkaline": 39210, "sbk": 39211, "resili": 39212, "sharepoint": 39213, "rollover": 39214, "trash": 39215, "counterpart": 39216, "âĻ«": 39217, "obitu": 39218, "à½": 39219, "ãĤ¹": 39220, "mulberry": 39221, "ðŁİĨ": 39222, "autonomy": 39223, "spraying": 39224, "natl": 39225, "loveyou": 39226, "franki": 39227, "nuk": 39228, "escar": 39229, "canteen": 39230, "alibaba": 39231, "deplor": 39232, "molecule": 39233, "pud": 39234, "fortnight": 39235, "blondie": 39236, "sphin": 39237, "portrayal": 39238, "tache": 39239, "bute": 39240, "consisting": 39241, "freepalestine": 39242, "csp": 39243, "immort": 39244, "dns": 39245, "ðŁē„ðŁē„": 39246, "tourde": 39247, "cooking": 39248, "archival": 39249, "gathers": 39250, "bitt": 39251, "banc": 39252, "premature": 39253, "snowball": 39254, "poetryday": 39255, "loudly": 39256, "fugitive": 39257, "eday": 39258, "emra": 39259, "ðŁĩ¸ðŁĩª": 39260, "scien": 39261, "nodejs": 39262, "jurgen": 39263, "jeong": 39264, "bandana": 39265, "unis": 39266, "foxsports": 39267, "vandy": 39268, "provisions": 39269, "weep": 39270, "tuk": 39271, "iko": 39272, "houn": 39273, "ziggy": 39274, "zr": 39275, "fillet": 39276, 
"bata": 39277, "tink": 39278, "cone": 39279, "wewant": 39280, "kilo": 39281, "horace": 39282, "slt": 39283, "sct": 39284, "staytuned": 39285, "victoria": 39286, "umbria": 39287, "attacker": 39288, "inghamshire": 39289, "frightening": 39290, "noir": 39291, "frat": 39292, "contempt": 39293, "liaison": 39294, "hoi": 39295, "brink": 39296, "trill": 39297, "niagar": 39298, "kickass": 39299, "dundas": 39300, "notmy": 39301, "rhode": 39302, "bumble": 39303, "noxi": 39304, "fag": 39305, "spectators": 39306, "mancrushmonday": 39307, "jinping": 39308, "distract": 39309, "daisy": 39310, "walden": 39311, "portrait": 39312, "arthistory": 39313, "voltron": 39314, "evel": 39315, "isc": 39316, "acm": 39317, "rite": 39318, "nao": 39319, "deported": 39320, "sweats": 39321, "rufus": 39322, "lobo": 39323, "laborday": 39324, "gamo": 39325, "ihrithik": 39326, "blit": 39327, "abdominal": 39328, "ãħ¤ãħ¤ãħ¤ãħ¤": 39329, "iit": 39330, "eq": 39331, "busy": 39332, "alluarjun": 39333, "undisclosed": 39334, "deton": 39335, "procreate": 39336, "kil": 39337, "ðŁİĤðŁİĤ": 39338, "mitchell": 39339, "kii": 39340, "inheritance": 39341, "alp": 39342, "joburg": 39343, "patrolling": 39344, "compulsory": 39345, "unsigned": 39346, "niam": 39347, "lga": 39348, "eshopsuk": 39349, "trilli": 39350, "maw": 39351, "appreciating": 39352, "rockab": 39353, "mañana": 39354, "antal": 39355, "malvern": 39356, "royo": 39357, "grandprix": 39358, "sutton": 39359, "goftheday": 39360, "digi": 39361, "ãħĭãħĭãħĭãħĭ": 39362, "tles": 39363, "varanasi": 39364, "erected": 39365, "disciples": 39366, "contact": 39367, "ðŁĺµ": 39368, "lid": 39369, "â¬ĩ": 39370, "scentre": 39371, "radiator": 39372, "ingtips": 39373, "transitions": 39374, "thursdaymotivation": 39375, "chemical": 39376, "separati": 39377, "salis": 39378, "mim": 39379, "geographical": 39380, "bookfest": 39381, "/.": 39382, "âľĭ": 39383, "vae": 39384, "currie": 39385, "aggarwal": 39386, "acceleration": 39387, "theses": 39388, "lgm": 39389, "umass": 39390, "proportions": 
39391, "nata": 39392, "anians": 39393, "kuch": 39394, "beacons": 39395, "apr": 39396, "@#": 39397, "ðŁēªðŁı¾": 39398, "nuke": 39399, "sheraton": 39400, "kio": 39401, "makati": 39402, "politico": 39403, "morale": 39404, "ƬĻ": 39405, "economically": 39406, "ggly": 39407, "ssen": 39408, "pastries": 39409, "internships": 39410, "vicente": 39411, "fantaken": 39412, "avengers": 39413, "accuse": 39414, "sleepover": 39415, "indicated": 39416, "thedream": 39417, "sterone": 39418, "renders": 39419, "frost": 39420, "oui": 39421, "gregg": 39422, "dore": 39423, "⾨⾨⾨": 39424, "pugs": 39425, "saty": 39426, "numb": 39427, "hemsworth": 39428, "tami": 39429, "lassic": 39430, "schiff": 39431, "iglesias": 39432, "agawa": 39433, "]\"": 39434, "reshi": 39435, "gamestop": 39436, "divorced": 39437, "theater": 39438, "claudi": 39439, "unconventional": 39440, "prophets": 39441, "acin": 39442, "twelf": 39443, "towering": 39444, "tml": 39445, "sclerosis": 39446, "kwan": 39447, "gets": 39448, "disturb": 39449, "naira": 39450, "energ": 39451, "piracy": 39452, "pruitt": 39453, "notified": 39454, "henna": 39455, "bram": 39456, "groundwater": 39457, "bls": 39458, "optimis": 39459, "$)": 39460, "lucie": 39461, "bizhour": 39462, "fangirling": 39463, "grills": 39464, "orl": 39465, "verse": 39466, "cina": 39467, "lawless": 39468, "artistsontwitter": 39469, "televised": 39470, "marshmallows": 39471, "radiohead": 39472, "barr": 39473, "mfc": 39474, "brevi": 39475, "mmorpg": 39476, "gaya": 39477, "âĸ«": 39478, "subtitles": 39479, "jt": 39480, "disneyland": 39481, "tobago": 39482, "nhm": 39483, "groove": 39484, "fiawec": 39485, "\"/": 39486, "bao": 39487, "scrabble": 39488, "omni": 39489, "ffl": 39490, "umc": 39491, "simba": 39492, "alier": 39493, "terrell": 39494, "plume": 39495, "midi": 39496, "dignit": 39497, "coc": 39498, "brut": 39499, "adata": 39500, "alchemy": 39501, "dsm": 39502, "ðŁĺĨðŁĺĨ": 39503, "wintry": 39504, "spares": 39505, "cuer": 39506, "conclusions": 39507, "toys": 39508, "odor": 39509, 
"flann": 39510, "garvey": 39511, "scriptions": 39512, "inspections": 39513, "catap": 39514, "anglo": 39515, "stlouis": 39516, "heimer": 39517, "atay": 39518, "trich": 39519, "enyc": 39520, "childs": 39521, "ventil": 39522, "montp": 39523, "guillermo": 39524, "circulare": 39525, "zell": 39526, "modeled": 39527, "craftsman": 39528, "alina": 39529, "stimulation": 39530, "cashew": 39531, "judas": 39532, "bestof": 39533, "toire": 39534, "suspends": 39535, "scollege": 39536, "realising": 39537, "bytes": 39538, "bloods": 39539, "assi": 39540, "ðŁē¿": 39541, "ohs": 39542, "ðŁįĭ": 39543, "scallop": 39544, "व": 39545, "gifting": 39546, "camogie": 39547, "wilkes": 39548, "ozzy": 39549, "ðŁ¤¤": 39550, "veronic": 39551, "savoy": 39552, "demetri": 39553, "babygirl": 39554, "ðŁĺįðŁĺŃ": 39555, "sox": 39556, "clyde": 39557, "inductee": 39558, "countdown": 39559, "selfcare": 39560, "à¤ľ": 39561, "vika": 39562, "torre": 39563, "phdchat": 39564, "pears": 39565, "awh": 39566, "suffrage": 39567, "lesn": 39568, "admiration": 39569, "mpp": 39570, "sharkweek": 39571, "schulz": 39572, "santorini": 39573, "clover": 39574, "(*": 39575, "strasbourg": 39576, "exiting": 39577, "soyu": 39578, "fingerprint": 39579, "chea": 39580, "ãĢľ": 39581, "vindic": 39582, "songwriters": 39583, "soa": 39584, "prouder": 39585, "nama": 39586, "=))": 39587, "simplest": 39588, "deliciously": 39589, "gilles": 39590, "uq": 39591, "mnwx": 39592, "epp": 39593, "shun": 39594, "kennel": 39595, "fallon": 39596, "ðŁIJ£": 39597, "sind": 39598, "tragically": 39599, "outes": 39600, "modernism": 39601, "coke": 39602, "gyn": 39603, "spion": 39604, "âĺ¹ï¸ı": 39605, "leam": 39606, "compressor": 39607, "apologise": 39608, "twentyon": 39609, "fanatics": 39610, "âĻ»": 39611, "scotsman": 39612, "sawa": 39613, "kou": 39614, "aser": 39615, "à¸ļ": 39616, "welterweight": 39617, "phenom": 39618, "twickenham": 39619, "stria": 39620, "pout": 39621, "kaz": 39622, "giam": 39623, "cdp": 39624, "hoy": 39625, "employ": 39626, "redmond": 39627, 
"à¸Ħà¸": 39628, "smere": 39629, "trancefamily": 39630, "protocols": 39631, "piece": 39632, "luiz": 39633, "iteracy": 39634, "carls": 39635, "unitedstates": 39636, "harmed": 39637, "phdlife": 39638, "chaw": 39639, "footprints": 39640, "lé": 39641, "choker": 39642, "zana": 39643, "slipper": 39644, "ericsson": 39645, "insulting": 39646, "artichoke": 39647, "advising": 39648, "acquisitions": 39649, "opor": 39650, "mutations": 39651, "rear": 39652, "à„ģ": 39653, "podcast": 39654, "wither": 39655, "kung": 39656, "Ć­ÄŗĀø": 39657, "winslow": 39658, "diapers": 39659, "ðŁĵ¸@": 39660, "ecker": 39661, "collar": 39662, "huey": 39663, "giro": 39664, "monogram": 39665, "kasich": 39666, "siveness": 39667, "malaysi": 39668, "aromatic": 39669, "gres": 39670, "galileo": 39671, "uji": 39672, "robb": 39673, "drm": 39674, "nonetheless": 39675, "asa": 39676, ":>": 39677, "loa": 39678, "lnp": 39679, "atwork": 39680, "agt": 39681, "lakshmi": 39682, "pipelines": 39683, "idal": 39684, "strel": 39685, "reall": 39686, "chainz": 39687, "stonewall": 39688, "sansk": 39689, "ðŁı“": 39690, "piedmont": 39691, "hostess": 39692, "ciu": 39693, "té": 39694, "analyses": 39695, "wilhelm": 39696, "scotty": 39697, "rwby": 39698, "mosquit": 39699, "usemb": 39700, "quins": 39701, "ðŁijİ": 39702, "tucker": 39703, "sconf": 39704, "specifications": 39705, "psychiatry": 39706, "brookes": 39707, "sils": 39708, "olaf": 39709, "deto": 39710, "codi": 39711, "clip": 39712, "filth": 39713, "womancrushwednesday": 39714, "goto": 39715, "angerous": 39716, "beale": 39717, "wtc": 39718, "panelist": 39719, "nex": 39720, "larsen": 39721, "emilio": 39722, "tableau": 39723, "hitters": 39724, "conceived": 39725, "americani": 39726, "ortega": 39727, "mardi": 39728, "ƑĄ": 39729, "paintball": 39730, "thirsty": 39731, "newyorker": 39732, "etisation": 39733, "goss": 39734, "weaker": 39735, "ugh": 39736, "troll": 39737, "harga": 39738, "dual": 39739, "ghtning": 39740, "atine": 39741, "ðŁĺİðŁĺİðŁĺİ": 39742, "cookout": 39743, 
"pyrenees": 39744, "poss": 39745, "authentication": 39746, "sportswear": 39747, "yunho": 39748, "kiro": 39749, "archipel": 39750, "shenko": 39751, "render": 39752, "novation": 39753, "divinity": 39754, "ðŁij£": 39755, "sufi": 39756, "humbling": 39757, "geopol": 39758, "devotees": 39759, "waitress": 39760, "trough": 39761, "pyro": 39762, "iba": 39763, "bling": 39764, "graf": 39765, "epilots": 39766, "btr": 39767, "oftball": 39768, "basking": 39769, "dominos": 39770, "soom": 39771, "rath": 39772, "sheryl": 39773, "quel": 39774, "astronomical": 39775, "weld": 39776, "tracklist": 39777, "signee": 39778, "sleepless": 39779, "comman": 39780, "chron": 39781, "summon": 39782, "puremichigan": 39783, "crispr": 39784, "slip": 39785, "lagi": 39786, "raq": 39787, "umu": 39788, "thalap": 39789, "charmed": 39790, "scrump": 39791, "quadcopter": 39792, "skip": 39793, "petersen": 39794, "muni": 39795, "ðŁĮ¾": 39796, "monaghan": 39797, "trays": 39798, "icked": 39799, "canadaday": 39800, "tegr": 39801, "�": 39802, "hotness": 39803, "heavymetal": 39804, "abar": 39805, "gopdebate": 39806, "azul": 39807, "spiderman": 39808, "sunflowers": 39809, "ľë": 39810, "webcomics": 39811, "bard": 39812, "в": 39813, "nicholas": 39814, "slush": 39815, "raman": 39816, "markham": 39817, "fficial": 39818, "ffler": 39819, "íĬ¸": 39820, "pless": 39821, "anushka": 39822, "toto": 39823, "skaters": 39824, "prowrestling": 39825, "competes": 39826, "ayala": 39827, "mystery": 39828, "thrills": 39829, "mpg": 39830, "independently": 39831, "yul": 39832, "imperative": 39833, "formidable": 39834, "tireless": 39835, "stacking": 39836, "tongues": 39837, "maltese": 39838, "potts": 39839, "matti": 39840, "charting": 39841, "chillout": 39842, "supernova": 39843, "omeo": 39844, "skysports": 39845, "nutty": 39846, "ðŁĹĵï¸ı": 39847, "rohan": 39848, "inspired": 39849, "concierge": 39850, "serra": 39851, "makk": 39852, "galat": 39853, "chipp": 39854, "yev": 39855, "ì£": 39856, "reimbur": 39857, "opul": 39858, "kimberley": 
39859, "ieee": 39860, "bremen": 39861, "chitec": 39862, "orin": 39863, "naku": 39864, "bonkers": 39865, "footy": 39866, "emergence": 39867, "ðŁĨĺ": 39868, "stip": 39869, "sergei": 39870, "zoey": 39871, "aime": 39872, "would": 39873, "dyes": 39874, "destiny": 39875, "vinaigrette": 39876, "drier": 39877, "circulareconomy": 39878, "anarchi": 39879, "ssr": 39880, "schel": 39881, "ciner": 39882, "groom": 39883, "determining": 39884, "garmin": 39885, "calais": 39886, "incarceration": 39887, "bukit": 39888, "noi": 39889, "chelmsford": 39890, "mckinley": 39891, "chipped": 39892, "belonged": 39893, "tumors": 39894, "stroud": 39895, "mii": 39896, "influenza": 39897, "wwenxt": 39898, "tundra": 39899, "telecommunications": 39900, "catsofinstagram": 39901, "tages": 39902, "beatty": 39903, "odu": 39904, "mlkday": 39905, "ooper": 39906, "dangle": 39907, "akley": 39908, "crumb": 39909, "antigua": 39910, "timbers": 39911, "rouhani": 39912, "ðŁēªðŁēªðŁēª": 39913, "hafi": 39914, "...!!": 39915, "wcs": 39916, "coop": 39917, "snc": 39918, "litres": 39919, "ãĢĬ": 39920, "haz": 39921, "coz": 39922, "kant": 39923, "greenfield": 39924, "curti": 39925, "yale": 39926, "flyeagles": 39927, "whatsoever": 39928, "worthing": 39929, "roulette": 39930, "flyeaglesfly": 39931, "unda": 39932, "ainted": 39933, "standing": 39934, "luscious": 39935, "hpc": 39936, "efficacy": 39937, "ashland": 39938, "meghan": 39939, "kywx": 39940, "npr": 39941, "bathtub": 39942, "acos": 39943, "hani": 39944, "marcor": 39945, "mantis": 39946, "daisi": 39947, "boba": 39948, "abbie": 39949, "mutil": 39950, "vial": 39951, "spyder": 39952, "poz": 39953, "gti": 39954, "elfie": 39955, "nightw": 39956, "metroid": 39957, "antoni": 39958, "maddie": 39959, "dhry": 39960, "darlings": 39961, "tends": 39962, "taekwondo": 39963, "atlanta": 39964, "meow": 39965, "chloe": 39966, "ãĄİ": 39967, "ymes": 39968, "siberia": 39969, "kcon": 39970, "gues": 39971, "mariner": 39972, "facil": 39973, "azzle": 39974, "[...": 39975, "hannover": 39976, 
"bavaria": 39977, "virgo": 39978, "teuk": 39979, "usps": 39980, ")#": 39981, "walla": 39982, "sampson": 39983, "needless": 39984, "verbally": 39985, "hayley": 39986, "bowled": 39987, "pius": 39988, "lampard": 39989, "hamstring": 39990, "volvo": 39991, "roadsafety": 39992, "choking": 39993, "sorbet": 39994, "ahem": 39995, "healthyfood": 39996, "braided": 39997, "horticulture": 39998, "crative": 39999, "cheek": 40000, "addo": 40001, "theforce": 40002, "koko": 40003, "schizoph": 40004, "jie": 40005, "wada": 40006, "twentyonepilots": 40007, "hbcu": 40008, "proton": 40009, "pauls": 40010, "louisa": 40011, "latam": 40012, "kyrgy": 40013, "compac": 40014, "sdk": 40015, "sapi": 40016, "???": 40017, "liberalism": 40018, "epsilon": 40019, "aiden": 40020, "wusa": 40021, "sprayed": 40022, "basketball": 40023, "kimono": 40024, "bluewave": 40025, "alias": 40026, "ë§Ī": 40027, "mugshot": 40028, "cec": 40029, "dogre": 40030, "adora": 40031, "ðŁĵ·@": 40032, "krakow": 40033, "intrigued": 40034, "exhausting": 40035, "astronomer": 40036, "venison": 40037, "ladybug": 40038, "civ": 40039, "brae": 40040, "usm": 40041, "bribe": 40042, "acupuncture": 40043, "pembroke": 40044, "keating": 40045, "chie": 40046, "yad": 40047, "tsi": 40048, "smi": 40049, "seeding": 40050, "gateshead": 40051, "lisboa": 40052, "gyp": 40053, "canvass": 40054, "ðŁĶ“âļªï¸ı": 40055, "opi": 40056, "nir": 40057, "societal": 40058, "lyte": 40059, "aties": 40060, "csm": 40061, "artery": 40062, "alin": 40063, "akapoor": 40064, "abstracts": 40065, "â̦â̦": 40066, "teenwolf": 40067, "newe": 40068, "travelgram": 40069, "sentimental": 40070, "perched": 40071, "handel": 40072, "hoek": 40073, "fay": 40074, "coordinating": 40075, "animate": 40076, "manian": 40077, "effort": 40078, "jerky": 40079, "fck": 40080, "adrienne": 40081, "mably": 40082, "trading": 40083, "myel": 40084, "spiro": 40085, "sola": 40086, "storing": 40087, "overdrive": 40088, "mondaymorning": 40089, "dreamteam": 40090, "pulse": 40091, "bondi": 40092, "bernie": 
40093, "pgatour": 40094, "tripoli": 40095, "sonam": 40096, "platt": 40097, "âļ”": 40098, "agroup": 40099, "îIJē": 40100, "invading": 40101, "vcu": 40102, "kell": 40103, "ños": 40104, "undead": 40105, "podcasting": 40106, "mercedesam": 40107, "manafort": 40108, "cortex": 40109, "queso": 40110, "impeccable": 40111, "palmer": 40112, "wildoz": 40113, "sportsc": 40114, "guacamole": 40115, "dispenser": 40116, "categori": 40117, "stunts": 40118, "peril": 40119, "invitations": 40120, "dunedin": 40121, "xie": 40122, "achieves": 40123, "safer": 40124, "preds": 40125, "phan": 40126, "knuckles": 40127, "kak": 40128, "ignores": 40129, "lovemyjob": 40130, "aruba": 40131, "oundation": 40132, "datacenter": 40133, "covert": 40134, "gring": 40135, "couple": 40136, "ار": 40137, "voli": 40138, "mccle": 40139, "artisans": 40140, "ludo": 40141, "kalam": 40142, "aroma": 40143, "undertaker": 40144, "hula": 40145, "wizkid": 40146, "gumb": 40147, "godfrey": 40148, "bakersfield": 40149, "kern": 40150, "engineer": 40151, "carve": 40152, "palin": 40153, "guarantees": 40154, "pebbles": 40155, "bays": 40156, "zieg": 40157, "fink": 40158, "â¬ĩï¸ıâ¬ĩï¸ı": 40159, "downpours": 40160, "rochelle": 40161, "raspberry": 40162, "ðŁĺ®": 40163, "graphies": 40164, "stomp": 40165, "cafes": 40166, "arized": 40167, "uttar": 40168, "calvary": 40169, "drie": 40170, "crusader": 40171, "busan": 40172, "tuxedo": 40173, "siu": 40174, "seamus": 40175, "cultured": 40176, "blanchard": 40177, "townhouse": 40178, "gered": 40179, "buttermilk": 40180, "fluctu": 40181, "rogerfederer": 40182, "heli": 40183, "ðŁ¦Ą": 40184, "uous": 40185, "ramesh": 40186, "muppets": 40187, "emailmarketing": 40188, "yess": 40189, "brice": 40190, "rizio": 40191, "pelo": 40192, "donneinarte": 40193, "urable": 40194, "investin": 40195, "bumping": 40196, "rajiv": 40197, "sava": 40198, "thrower": 40199, "forex": 40200, "ohhhh": 40201, "thrust": 40202, "pullman": 40203, "rfid": 40204, "sepsis": 40205, "leed": 40206, "fright": 40207, "rounding": 40208, 
"neb": 40209, "phins": 40210, "aisha": 40211, "utilizing": 40212, "squats": 40213, "goldsmith": 40214, "jic": 40215, "boks": 40216, "vaus": 40217, "ipo": 40218, "exclusion": 40219, "tariff": 40220, "pokes": 40221, "minal": 40222, "lands": 40223, "enforce": 40224, "washingtondc": 40225, "orchar": 40226, "gx": 40227, "marys": 40228, "eyour": 40229, "aussie": 40230, "bakers": 40231, "unpopular": 40232, "latinos": 40233, "large": 40234, "putnam": 40235, "bolo": 40236, "wade": 40237, "pelo": 40238, "dizz": 40239, "obstruction": 40240, "flappy": 40241, "wearethe": 40242, "dependence": 40243, "pajama": 40244, "ete": 40245, "yann": 40246, "ewan": 40247, "discla": 40248, "aay": 40249, "karina": 40250, "eic": 40251, "antrim": 40252, "wsoc": 40253, "negatively": 40254, "kaido": 40255, "fotografia": 40256, "dhru": 40257, "colossal": 40258, "mcleod": 40259, "kwang": 40260, "manipu": 40261, "exhilar": 40262, "usatoday": 40263, "summerslam": 40264, "coles": 40265, "taproom": 40266, "unbeatable": 40267, "dema": 40268, "ticks": 40269, "kling": 40270, "fils": 40271, "campaigners": 40272, "Ć ĀøÄ·": 40273, "brewster": 40274, "audubon": 40275, "quay": 40276, "chs": 40277, "kigali": 40278, "dler": 40279, "strengthens": 40280, "somal": 40281, "signingday": 40282, "golds": 40283, "pigment": 40284, "orchestral": 40285, "gq": 40286, "linkin": 40287, "ðŁıĩ": 40288, "taw": 40289, "algarve": 40290, "hov": 40291, "earle": 40292, "goldfish": 40293, "amig": 40294, "exer": 40295, "benin": 40296, "druid": 40297, "ðŁIJ¸": 40298, "shem": 40299, "quattro": 40300, "mercen": 40301, "mente": 40302, "incorporating": 40303, "bonanza": 40304, "statefair": 40305, "ende": 40306, "conceptions": 40307, "ees": 40308, "âĻ„ï¸ıâĻ„ï¸ı": 40309, "dson": 40310, "firearm": 40311, "orbital": 40312, "weh": 40313, "multip": 40314, "fob": 40315, "requiem": 40316, "plight": 40317, "thouse": 40318, "said": 40319, "ocre": 40320, "remembrance": 40321, "nold": 40322, "chipping": 40323, "bev": 40324, "ert": 40325, "cathy": 40326, 
"sym": 40327, "riggs": 40328, "mley": 40329, "dialogues": 40330, "slender": 40331, "howl": 40332, "gauteng": 40333, "wdw": 40334, "tobi": 40335, "smokes": 40336, "implo": 40337, "bpm": 40338, "adn": 40339, "mombasa": 40340, "capsul": 40341, "bloomfield": 40342, "articul": 40343, "cleo": 40344, "googled": 40345, "fluffy": 40346, "lard": 40347, "enzyme": 40348, "vesti": 40349, "ibrahi": 40350, "flame": 40351, "emea": 40352, "outages": 40353, "dispropor": 40354, "bleak": 40355, "ansel": 40356, "icker": 40357, "stlouis": 40358, "stockmarket": 40359, "goodfriday": 40360, "sault": 40361, "stalled": 40362, "prom": 40363, "epsom": 40364, "bé": 40365, "these": 40366, "sauces": 40367, "mew": 40368, "litfest": 40369, "pred": 40370, "reu": 40371, "karak": 40372, "sienna": 40373, "ellin": 40374, "biotechnology": 40375, "ï¸ıâĄ£-": 40376, "tactic": 40377, "sain": 40378, "pork": 40379, "monza": 40380, "kaj": 40381, "lush": 40382, "compartment": 40383, "changing": 40384, "shraddhakapoor": 40385, "foal": 40386, "artem": 40387, "cuando": 40388, "canola": 40389, "oriente": 40390, "messe": 40391, "dited": 40392, "brc": 40393, "boxer": 40394, "bbctwo": 40395, "sst": 40396, "mentday": 40397, "eming": 40398, "dewey": 40399, "kofi": 40400, "âŀĸâŀĸâŀĸâŀĸ": 40401, "realization": 40402, "smol": 40403, "twood": 40404, "sanje": 40405, "flagstaff": 40406, "berwick": 40407, "corset": 40408, "canary": 40409, "whistleblower": 40410, "etched": 40411, "composing": 40412, "squeezed": 40413, "bower": 40414, "autodesk": 40415, "neh": 40416, "mathieu": 40417, "baja": 40418, "ƅĤ": 40419, "hydra": 40420, "daim": 40421, "ameri": 40422, "insisted": 40423, "merlot": 40424, "garros": 40425, "heartnews": 40426, "gainesville": 40427, "cutler": 40428, "bode": 40429, "ðŁĺīðŁĺī": 40430, "lewes": 40431, "scountry": 40432, "gsa": 40433, "usu": 40434, "ccm": 40435, "godawgs": 40436, "pharaoh": 40437, "crae": 40438, "morley": 40439, "hypnoti": 40440, "fades": 40441, "neurons": 40442, "fuzz": 40443, "ingco": 40444, 
"highlanders": 40445, "stark": 40446, "vigne": 40447, "packets": 40448, "amarillo": 40449, "reuben": 40450, "insults": 40451, "basic": 40452, "vector": 40453, "nme": 40454, "acruz": 40455, "tros": 40456, "transmitter": 40457, "ðŁĺŀ": 40458, "interpret": 40459, "ðŁĺ²": 40460, "prequel": 40461, "mcgowan": 40462, "dissemin": 40463, "ðŁēĺðŁēĺ": 40464, "masculinity": 40465, "indiegamedev": 40466, "alive": 40467, "tet": 40468, "petal": 40469, "emailed": 40470, "armed": 40471, "koo": 40472, "heer": 40473, "baird": 40474, "superjunior": 40475, "metropolis": 40476, "delavin": 40477, "declines": 40478, "stitutes": 40479, "Ûģ": 40480, "ptbo": 40481, "glan": 40482, "chores": 40483, "ealing": 40484, "chrissy": 40485, "stemc": 40486, "vian": 40487, "assassinated": 40488, "pronounce": 40489, "illegals": 40490, "discovery": 40491, "cavill": 40492, "frifotos": 40493, "fal": 40494, "soi": 40495, "sabotage": 40496, "tint": 40497, "pdc": 40498, "ðŁİīðŁİĪ": 40499, "ãĤĬãģ": 40500, "jio": 40501, "endeavor": 40502, "insig": 40503, "committees": 40504, "shearer": 40505, "metz": 40506, "marrying": 40507, "hdd": 40508, "gby": 40509, "fret": 40510, "trish": 40511, "pul": 40512, "scripted": 40513, "saki": 40514, "lw": 40515, "keye": 40516, "shimi": 40517, "nanaimo": 40518, "cah": 40519, "ë": 40520, "tempered": 40521, "ician": 40522, "dugg": 40523, "dishwasher": 40524, "airfield": 40525, "srugby": 40526, "grinch": 40527, "yst": 40528, "rms": 40529, "mahatma": 40530, "lankan": 40531, "discar": 40532, "digestion": 40533, "nodes": 40534, "lls": 40535, "omic": 40536, "gutter": 40537, "tisgarh": 40538, "federico": 40539, "electionday": 40540, "bohe": 40541, "mastercard": 40542, "fireball": 40543, "âľĶï¸ı": 40544, "oyster": 40545, "pong": 40546, "dok": 40547, "enroute": 40548, "mvc": 40549, "beatthe": 40550, "alistair": 40551, "shub": 40552, "shaming": 40553, "chernobyl": 40554, "ghibli": 40555, "thes": 40556, "pinion": 40557, "dbs": 40558, "salts": 40559, "iction": 40560, "epiph": 40561, "ncpol": 
40562, "inconvenience": 40563, "whitley": 40564, "inspecting": 40565, "woodley": 40566, "wiener": 40567, "skillet": 40568, "noles": 40569, "mca": 40570, "hina": 40571, "asha": 40572, "willingness": 40573, "wellness": 40574, "tamed": 40575, "showtime": 40576, "disadvantaged": 40577, "bernat": 40578, "usn": 40579, "missionaries": 40580, "counselling": 40581, "arrogant": 40582, "quantitative": 40583, "legalization": 40584, "hodge": 40585, "energyefficiency": 40586, "camerondallas": 40587, "possessions": 40588, "pbb": 40589, "harrisburg": 40590, "vg": 40591, "hinduism": 40592, "happythanksgiving": 40593, "fib": 40594, "reacting": 40595, "tweetapicture": 40596, "politi": 40597, "muppet": 40598, "hurrah": 40599, "pace": 40600, "coastguard": 40601, "guarded": 40602, "asam": 40603, "parry": 40604, "forevery": 40605, "xq": 40606, "oomf": 40607, "keanu": 40608, "jind": 40609, "rist": 40610, "customerservice": 40611, "sacred": 40612, "ðŁĺº": 40613, "toner": 40614, "occurrence": 40615, "matu": 40616, "valdez": 40617, "redd": 40618, "isak": 40619, "powerrangers": 40620, "peasant": 40621, "rajini": 40622, "abraham": 40623, "emil": 40624, "cardo": 40625, "tril": 40626, "hairstyles": 40627, "obsolete": 40628, "sampler": 40629, "directive": 40630, "delavinkisses": 40631, "verton": 40632, "glos": 40633, "spay": 40634, "palermo": 40635, "comets": 40636, "manziel": 40637, "chicagof": 40638, "skipped": 40639, "pictorial": 40640, "hant": 40641, "bmi": 40642, "aol": 40643, "reopens": 40644, "paddling": 40645, "devos": 40646, "fraud": 40647, "baseline": 40648, "queues": 40649, "spired": 40650, "snare": 40651, "euve": 40652, "descriptions": 40653, "daisies": 40654, "caching": 40655, "galleria": 40656, "trimmed": 40657, "stino": 40658, "recycla": 40659, "icular": 40660, "birken": 40661, "rawlings": 40662, "flix": 40663, "chicas": 40664, "bgt": 40665, "likeli": 40666, "argyll": 40667, "thelove": 40668, "gaston": 40669, "blanca": 40670, "hak": 40671, "fone": 40672, "sailormoon": 40673, 
"haci": 40674, "imac": 40675, "flyn": 40676, "decan": 40677, "belles": 40678, "apic": 40679, "zog": 40680, "taunton": 40681, "constance": 40682, "lasagna": 40683, "kernel": 40684, "inka": 40685, "harbor": 40686, "collectively": 40687, "calculated": 40688, "aville": 40689, "shilpa": 40690, "purdu": 40691, "gimm": 40692, "funer": 40693, "aest": 40694, "pembrokeshire": 40695, "nightingale": 40696, "nunes": 40697, "hypertension": 40698, "hubert": 40699, "sliders": 40700, "infertility": 40701, "commended": 40702, "transatlantic": 40703, "metrical": 40704, "!!@": 40705, "ÅŁ": 40706, "ssg": 40707, "bacca": 40708, "inverted": 40709, "funfactfriday": 40710, "itans": 40711, "album": 40712, "acquainted": 40713, "rier": 40714, "whelan": 40715, "sarab": 40716, "mue": 40717, "snooze": 40718, "piff": 40719, "agreeing": 40720, "spitting": 40721, "jermaine": 40722, "nye": 40723, "âľıï¸ı": 40724, "ambush": 40725, "zeph": 40726, "congreg": 40727, "university": 40728, "sapp": 40729, "wannabe": 40730, "patrice": 40731, "ibd": 40732, "doglo": 40733, "fridges": 40734, "sund": 40735, "kingston": 40736, "argon": 40737, "kamen": 40738, "hardrock": 40739, "dsley": 40740, "dolores": 40741, "ì°": 40742, "otaku": 40743, "piping": 40744, "behaving": 40745, "âŃIJï¸ıâŃIJï¸ıâŃIJï¸ı": 40746, "bluebird": 40747, "ansari": 40748, "teapot": 40749, "firework": 40750, "crop": 40751, "logans": 40752, "typed": 40753, "thickness": 40754, "igers": 40755, "cfp": 40756, "dysfunctional": 40757, "contrasting": 40758, "etty": 40759, "astonmartin": 40760, "txst": 40761, "dragrace": 40762, "attributes": 40763, "marathon": 40764, "manuscripts": 40765, "johnstone": 40766, "ðŁĺ±ðŁĺ±": 40767, "boer": 40768, "ayu": 40769, "arugula": 40770, "poorest": 40771, "condu": 40772, "assumption": 40773, "anagh": 40774, "noh": 40775, "delavin": 40776, "sitter": 40777, "gö": 40778, "morow": 40779, "kickstart": 40780, "comi": 40781, "glacial": 40782, "ghead": 40783, "bain": 40784, "kershaw": 40785, "endof": 40786, "freud": 40787, 
"omat": 40788, "iaf": 40789, "hug": 40790, "signup": 40791, "eachother": 40792, "definite": 40793, "tubing": 40794, "shakira": 40795, "ðŁijıðŁı½": 40796, "uuuu": 40797, "swin": 40798, "shambles": 40799, "olas": 40800, "skell": 40801, "britain": 40802, "knw": 40803, "clutter": 40804, "omy": 40805, "jens": 40806, "hanged": 40807, "cityscape": 40808, "scraps": 40809, "unlocking": 40810, "deadliest": 40811, "erno": 40812, "breastcancer": 40813, "ait": 40814, "inspect": 40815, "furi": 40816, "ðŁēĮ": 40817, "kud": 40818, "jule": 40819, "orah": 40820, "mids": 40821, "mdt": 40822, "burgring": 40823, "rattle": 40824, "pusa": 40825, "stalk": 40826, "cleans": 40827, "issance": 40828, "zek": 40829, "worthit": 40830, "nameis": 40831, "muskoka": 40832, "councilman": 40833, "urbanart": 40834, "barrac": 40835, "unsolved": 40836, "tul": 40837, "gita": 40838, "whiteboard": 40839, "soybeans": 40840, "ement": 40841, "conti": 40842, "saturdaymotivation": 40843, "conveniently": 40844, "docking": 40845, "tado": 40846, "âı©": 40847, "spino": 40848, "puppylove": 40849, "pof": 40850, "fabricated": 40851, "robbers": 40852, "adopts": 40853, "tified": 40854, "kkr": 40855, "indulgence": 40856, "noticeable": 40857, "macquarie": 40858, "chapel": 40859, "sensual": 40860, "kiko": 40861, "melanoma": 40862, "loretta": 40863, "liance": 40864, "aben": 40865, "splus": 40866, "gaal": 40867, "acele": 40868, "libdems": 40869, "comparisons": 40870, "ðŁĮµ": 40871, "rhythms": 40872, "mery": 40873, "encapsul": 40874, "napier": 40875, "ðŁijĮðŁijĮðŁijĮ": 40876, "ðŁijIJ": 40877, "platz": 40878, "fresno": 40879, "reformed": 40880, "ranbir": 40881, "elit": 40882, "thebest": 40883, "bhushan": 40884, "vinnie": 40885, "improvised": 40886, "sittin": 40887, "recreated": 40888, "eba": 40889, "ecker": 40890, "acrob": 40891, "ponte": 40892, "cord": 40893, "giddy": 40894, "eurusd": 40895, "fever": 40896, "intuition": 40897, "gari": 40898, "dummies": 40899, "budweiser": 40900, "amendments": 40901, "tetra": 40902, "schnit": 
40903, "ayas": 40904, "marys": 40905, "cist": 40906, "kani": 40907, "kermit": 40908, "ðŁĺ±ðŁĺ±ðŁĺ±": 40909, "tinker": 40910, "strolling": 40911, "divisional": 40912, "nigeri": 40913, "ominous": 40914, "menstrual": 40915, "karab": 40916, "khy": 40917, "bwfc": 40918, "panhandle": 40919, "lilli": 40920, "weller": 40921, "strapped": 40922, "sonthe": 40923, "transferring": 40924, "ethereal": 40925, "sneaks": 40926, "rudol": 40927, "gables": 40928, "jacking": 40929, "cincode": 40930, "fortune": 40931, "canadiens": 40932, "confor": 40933, "abnormal": 40934, "franklin": 40935, "tita": 40936, "mula": 40937, "persist": 40938, "cuties": 40939, "kiel": 40940, "ðŁĩ±ðŁĩ": 40941, "hermann": 40942, "awk": 40943, "fiasco": 40944, "koto": 40945, "weta": 40946, "hiker": 40947, "buddy": 40948, "preventive": 40949, "mcgraw": 40950, "gameboy": 40951, "forsyth": 40952, "topshop": 40953, "siob": 40954, "sadh": 40955, "intram": 40956, "followart": 40957, "soaps": 40958, "dragonball": 40959, "oux": 40960, "morrison": 40961, "à¹Ą": 40962, "lubric": 40963, "adulthood": 40964, "morrisons": 40965, "âļłï¸ı": 40966, "hermo": 40967, "taka": 40968, "stallone": 40969, "misuse": 40970, "teamgb": 40971, "ragha": 40972, "confined": 40973, "aty": 40974, "homophobic": 40975, "nwo": 40976, "skynews": 40977, "hoya": 40978, "acrosse": 40979, "wiiu": 40980, "purée": 40981, "jeddah": 40982, "ðŁ¤§": 40983, "advisers": 40984, "phine": 40985, "anis": 40986, "scrumptious": 40987, "ë°ķ": 40988, "cke": 40989, "viny": 40990, "term": 40991, "sdc": 40992, "odo": 40993, "homeschool": 40994, "vasc": 40995, "leopards": 40996, "deborah": 40997, "illicit": 40998, "curran": 40999, "asroma": 41000, "naught": 41001, "marig": 41002, "brandi": 41003, "emp": 41004, "ðŁĺįðŁijĮ": 41005, "ƮĮ": 41006, "suspend": 41007, "luz": 41008, "initiation": 41009, "schaft": 41010, "jensenackles": 41011, "crawler": 41012, "postdoc": 41013, "desks": 41014, "trailblazer": 41015, "denomin": 41016, "trix": 41017, "noise": 41018, "poet": 41019, 
"±ï¸ı": 41020, "smug": 41021, "volatile": 41022, "proofs": 41023, "pharmacist": 41024, "sardinia": 41025, "mashable": 41026, "kimchi": 41027, "coed": 41028, "schalke": 41029, "doodled": 41030, "csw": 41031, "shur": 41032, "rox": 41033, "dok": 41034, "chrisbrown": 41035, "mathematician": 41036, "abound": 41037, "angelic": 41038, "rockford": 41039, "dole": 41040, "yorkers": 41041, "msn": 41042, "gman": 41043, "xavier": 41044, "borrowing": 41045, "markings": 41046, "longhorn": 41047, "kja": 41048, "diverted": 41049, "mmit": 41050, "euphoria": 41051, "ayyy": 41052, "tea": 41053, "pah": 41054, "cki": 41055, "uncut": 41056, "liven": 41057, "kyung": 41058, "fanart": 41059, "mering": 41060, "redding": 41061, "amovie": 41062, "gridi": 41063, "cthulhu": 41064, "scholarly": 41065, "judah": 41066, "thbewithyou": 41067, "eucalyp": 41068, "ðŁIJķ": 41069, "hertfordshire": 41070, "courtroom": 41071, "byu": 41072, "auctioned": 41073, "please": 41074, "marcia": 41075, "ê°ĵ": 41076, "succeeded": 41077, "elas": 41078, "arvind": 41079, "tlot": 41080, "saigon": 41081, "rett": 41082, "rakesh": 41083, "fdny": 41084, "asen": 41085, "sebring": 41086, "gladiators": 41087, "youknow": 41088, "vlad": 41089, "gola": 41090, "parap": 41091, "ÑĢи": 41092, "sabcnews": 41093, "oneteam": 41094, "ohl": 41095, "sune": 41096, "rij": 41097, "cdc": 41098, "stargate": 41099, "rundown": 41100, "plato": 41101, "phc": 41102, "chatter": 41103, "raviol": 41104, "mnf": 41105, "mandala": 41106, "liet": 41107, "Ć ĀøÄ·": 41108, "maria": 41109, "hungover": 41110, "consolidation": 41111, "ferrell": 41112, "traditional": 41113, "iloveart": 41114, "galap": 41115, "ðŁıĮ": 41116, "quezon": 41117, "españa": 41118, "ðŁĩ¨ðŁĩŃ": 41119, "hobby": 41120, "steamboat": 41121, "malign": 41122, "guillau": 41123, "prohi": 41124, "itsme": 41125, "íĄĢ": 41126, "inscription": 41127, "alz": 41128, "marian": 41129, "kade": 41130, "mmon": 41131, "adjusting": 41132, "nests": 41133, "internally": 41134, "cir": 41135, "vikram": 41136, 
"malala": 41137, "kph": 41138, "felicia": 41139, "thereal": 41140, "captivity": 41141, "atis": 41142, "marcorubio": 41143, "kaleido": 41144, "chev": 41145, "manoj": 41146, "lemore": 41147, "gentri": 41148, "vips": 41149, "trope": 41150, "\"âĢĶ": 41151, "pairings": 41152, "malnutrition": 41153, "fray": 41154, "designation": 41155, "brunomars": 41156, "aze": 41157, "torrential": 41158, "panzer": 41159, "gail": 41160, "underthe": 41161, "theological": 41162, "schizophre": 41163, "dazzle": 41164, "frederic": 41165, "mopar": 41166, "adilla": 41167, "soggy": 41168, "raun": 41169, "mediocre": 41170, "colorec": 41171, "ife": 41172, "pinst": 41173, "bluef": 41174, "²": 41175, "worldwater": 41176, "giroud": 41177, "clarinet": 41178, "adolf": 41179, "tarantino": 41180, "receipts": 41181, "assump": 41182, "ðŁijŁ": 41183, "coffees": 41184, "âľĬðŁı¾": 41185, "duplex": 41186, "sof": 41187, "rx": 41188, "lino": 41189, "timberwolves": 41190, "pandit": 41191, "motm": 41192, "ega": 41193, "ayama": 41194, "achs": 41195, "outsider": 41196, "llen": 41197, "coer": 41198, "tilly": 41199, "cheeseburger": 41200, "mads": 41201, "pledis": 41202, "empty": 41203, "nationalparks": 41204, "aziz": 41205, "pmi": 41206, "junkies": 41207, "fener": 41208, "sqn": 41209, "ès": 41210, "generation": 41211, "cleopatra": 41212, "bhubanes": 41213, "mosques": 41214, "tyfree": 41215, "poppins": 41216, "twc": 41217, "orwell": 41218, "nage": 41219, "kawhi": 41220, "hollow": 41221, "dalai": 41222, "¨¨¨¨": 41223, "ouro": 41224, "mhealth": 41225, "gion": 41226, "azo": 41227, "visas": 41228, "renegade": 41229, "reic": 41230, "wsop": 41231, "ðŁēļðŁēĽ": 41232, "echel": 41233, "toxicity": 41234, "mün": 41235, "bunk": 41236, "stimulating": 41237, "asthour": 41238, "\\'": 41239, "eph": 41240, "endemic": 41241, "cnbc": 41242, "shrinking": 41243, "peabody": 41244, "michelangelo": 41245, "canyon": 41246, "wale": 41247, "sumi": 41248, "siders": 41249, "inuit": 41250, "?.": 41251, "professionalism": 41252, "dracing": 41253, 
"platoon": 41254, "pons": 41255, "outbound": 41256, "mapleleafs": 41257, "desol": 41258, "cency": 41259, "athan": 41260, "verma": 41261, "rubbing": 41262, "okan": 41263, "ðŁijł": 41264, "mullins": 41265, "authentic": 41266, "Ć…ÄÆ": 41267, "almanac": 41268, "gaia": 41269, "bbq": 41270, "onimo": 41271, "keh": 41272, "tya": 41273, "touts": 41274, "yav": 41275, "reposit": 41276, ",.": 41277, "wight": 41278, "seeyou": 41279, "callof": 41280, "donesia": 41281, "bargaining": 41282, "granth": 41283, "sdsu": 41284, "amphitheater": 41285, "psu": 41286, "rewatching": 41287, "winetasting": 41288, "peakdistrict": 41289, "detecting": 41290, "thurman": 41291, "phee": 41292, "ĆØĀŖÄ·": 41293, "umich": 41294, "rer": 41295, "sculpted": 41296, "gole": 41297, "namesake": 41298, "ðŁĶģ": 41299, "servicing": 41300, "baugh": 41301, "pugh": 41302, "pencil": 41303, "darth": 41304, "munchkin": 41305, "atorium": 41306, "teners": 41307, "suny": 41308, "rollingstones": 41309, "maging": 41310, "starrer": 41311, "idris": 41312, "feinstein": 41313, "agron": 41314, "âĺºï¸ıâĺºï¸ı": 41315, "supervised": 41316, "chameleon": 41317, "aggregate": 41318, "successive": 41319, "mogul": 41320, "instyle": 41321, "poldark": 41322, "custome": 41323, "ohiostate": 41324, "haya": 41325, "cides": 41326, "brokerage": 41327, "angelou": 41328, "fifawwc": 41329, "deforestation": 41330, "alton": 41331, "pamph": 41332, "hugged": 41333, "hobo": 41334, "changeable": 41335, "kuber": 41336, "burroughs": 41337, "demonetisation": 41338, "capecod": 41339, "versatility": 41340, "orice": 41341, "leila": 41342, "womeninscience": 41343, "tua": 41344, "hedges": 41345, "embarrassment": 41346, "alife": 41347, "soars": 41348, "nighter": 41349, "hymn": 41350, "gipp": 41351, "chasu": 41352, "techs": 41353, "niall": 41354, "killa": 41355, "hika": 41356, "camels": 41357, "value": 41358, "¢": 41359, "scoops": 41360, "mahmoud": 41361, "clusive": 41362, "adriana": 41363, "paco": 41364, "ozil": 41365, "unas": 41366, "translations": 41367, 
"whisperer": 41368, "sbi": 41369, "buxton": 41370, "biotics": 41371, "indiffe": 41372, "kenney": 41373, "klar": 41374, "etching": 41375, "barrabest": 41376, "instability": 41377, "seine": 41378, "votel": 41379, "blogged": 41380, "whiskey": 41381, "myspace": 41382, "tant": 41383, "landia": 41384, "giveback": 41385, "illus": 41386, "awak": 41387, "acab": 41388, "fbloggers": 41389, "cloudcomputing": 41390, "blatant": 41391, "syrians": 41392, "bandra": 41393, "styn": 41394, "anem": 41395, "keted": 41396, "karthik": 41397, "barunsob": 41398, "pinot": 41399, "gubernat": 41400, "gaye": 41401, "artiste": 41402, "ified": 41403, "conventions": 41404, "huan": 41405, "geniuses": 41406, "eeeeee": 41407, "folly": 41408, "somerville": 41409, "pridemonth": 41410, "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 41411, "chemotherapy": 41412, "pauls": 41413, "bakar": 41414, "ìĦ¸ë¸IJ": 41415, "taiwanese": 41416, "follo": 41417, "css": 41418, "reign": 41419, "nnnn": 41420, "flaun": 41421, "catastrophe": 41422, "ities": 41423, "fragments": 41424, "extremists": 41425, "ymoun": 41426, "carmen": 41427, "ezekiel": 41428, "connecting": 41429, "seh": 41430, "manta": 41431, "remodeling": 41432, "weymouth": 41433, "atoms": 41434, "cem": 41435, "newell": 41436, "lumi": 41437, "theopen": 41438, "moc": 41439, "miliband": 41440, "gland": 41441, "zshq": 41442, "maggie": 41443, "maniacs": 41444, "msp": 41445, "ady": 41446, "creams": 41447, "leanne": 41448, "esta": 41449, "pyg": 41450, "affinity": 41451, "prayer": 41452, "dunbar": 41453, "lightroom": 41454, "acadi": 41455, "wynonna": 41456, "romantic": 41457, "statedept": 41458, "sickle": 41459, "whos": 41460, "lamo": 41461, "etour": 41462, "finity": 41463, "shrub": 41464, "sharpen": 41465, "pundit": 41466, "edon": 41467, "afore": 41468, "mars": 41469, "jeffery": 41470, "terps": 41471, "medallist": 41472, "katharine": 41473, "accusing": 41474, "taz": 41475, "royd": 41476, "fromhome": 41477, "confrontation": 41478, "allegh": 41479, "ðŁijīðŁijī": 41480, "refresher": 41481, 
"ranveer": 41482, "neverland": 41483, "jojo": 41484, "lucrative": 41485, "enam": 41486, "caver": 41487, "paedi": 41488, "manjaro": 41489, "fluids": 41490, "thessal": 41491, "oppressed": 41492, "muss": 41493, "johanna": 41494, "Ø®": 41495, "cng": 41496, "buildthe": 41497, "settles": 41498, "sith": 41499, "fuego": 41500, "clamp": 41501, "arag": 41502, "payer": 41503, "tedx": 41504, "mandy": 41505, "interstellar": 41506, "frc": 41507, "chand": 41508, "bcc": 41509, "molo": 41510, "lentil": 41511, "johansson": 41512, "grimsby": 41513, "naturelovers": 41514, "ðŁļ¨ðŁļ¨ðŁļ¨": 41515, "shinde": 41516, "xin": 41517, "internationaldayof": 41518, "transitional": 41519, "sata": 41520, "caddy": 41521, "wod": 41522, "ifu": 41523, "hays": 41524, "hollyo": 41525, "jang": 41526, "irc": 41527, "coim": 41528, "gradable": 41529, "\"\"": 41530, "ðŁį“": 41531, "া": 41532, "ael": 41533, "nyo": 41534, "westlake": 41535, "timeout": 41536, "sofi": 41537, "phenomena": 41538, "cultivation": 41539, "agno": 41540, "unarmed": 41541, "sot": 41542, "conj": 41543, "geno": 41544, "royalnavy": 41545, "nutrition": 41546, "fairmont": 41547, "tirelessly": 41548, "sng": 41549, "rety": 41550, "mica": 41551, "lucent": 41552, "sloane": 41553, "drool": 41554, "rizal": 41555, "odell": 41556, "criticized": 41557, ".'\"": 41558, "laze": 41559, "deserted": 41560, "coder": 41561, "pras": 41562, "lillian": 41563, "itinerary": 41564, "davy": 41565, "anap": 41566, "whipping": 41567, "hoboken": 41568, "kareena": 41569, "羣": 41570, "vius": 41571, "tern": 41572, "nantucket": 41573, "misunderstood": 41574, "bulaga": 41575, "stant": 41576, "chinook": 41577, "zam": 41578, "relies": 41579, "dss": 41580, "edmond": 41581, "sketchy": 41582, "mell": 41583, "fex": 41584, "rector": 41585, "distill": 41586, "daydream": 41587, "winemaker": 41588, "ripley": 41589, "billionaires": 41590, "helene": 41591, "atif": 41592, "culprit": 41593, "bertrand": 41594, "wouldnt": 41595, "mapped": 41596, "vak": 41597, "gladly": 41598, "parliament": 
41599, "kidlitart": 41600, "wareness": 41601, "goliath": 41602, "âĨĵ": 41603, "viewpoint": 41604, "tatted": 41605, "fuls": 41606, "dorsey": 41607, "anglers": 41608, "lids": 41609, "kiya": 41610, "bowles": 41611, "beh": 41612, "bite": 41613, "compatibility": 41614, "ancestral": 41615, "prox": 41616, "behaved": 41617, "gubernatorial": 41618, "chfield": 41619, "saban": 41620, "zh": 41621, "teeny": 41622, "shibuya": 41623, "holliday": 41624, "pancy": 41625, "âĿĦï¸ıâĿĦï¸ı": 41626, "seungri": 41627, "?,": 41628, "ðŁĩ¦ðŁĩ·": 41629, "imitation": 41630, "impactful": 41631, "anyi": 41632, "genevie": 41633, "años": 41634, "bateman": 41635, "glider": 41636, "afar": 41637, "rasheed": 41638, "effortless": 41639, "shwar": 41640, "dachsh": 41641, "erun": 41642, "atos": 41643, "kini": 41644, "chd": 41645, "khaki": 41646, "klin": 41647, "felicidades": 41648, "belo": 41649, "asl": 41650, "toppers": 41651, "finley": 41652, "stacey": 41653, "rigorous": 41654, "karting": 41655, "leppard": 41656, "carmichael": 41657, "beret": 41658, "cse": 41659, "akhi": 41660, "meringue": 41661, "aban": 41662, "hake": 41663, "geri": 41664, "erjee": 41665, "resto": 41666, "commanders": 41667, "prit": 41668, "flor": 41669, "adven": 41670, "extermin": 41671, "remainder": 41672, "ÄIJ": 41673, "esg": 41674, "martino": 41675, "lullaby": 41676, "|@": 41677, "mign": 41678, "instore": 41679, "bigbang": 41680, "cordi": 41681, "cauley": 41682, "antebellum": 41683, "dgate": 41684, "crock": 41685, "spandex": 41686, "scaffolding": 41687, "oreos": 41688, "ê°ĵìĦ¸ë¸IJ": 41689, "pomona": 41690, "mauro": 41691, "universi": 41692, "remi": 41693, "afootball": 41694, "tant": 41695, "smalls": 41696, "neh": 41697, "worldo": 41698, "tropical": 41699, "morph": 41700, "javelin": 41701, "glar": 41702, "arquitec": 41703, "reminiscent": 41704, "tubs": 41705, "spidey": 41706, "makeu": 41707, "sylla": 41708, "progressives": 41709, "blot": 41710, "shorten": 41711, "keepin": 41712, "chak": 41713, "angst": 41714, "superfood": 41715, 
"decadent": 41716, "stony": 41717, "neurological": 41718, "arboretum": 41719, "annak": 41720, "fema": 41721, "percu": 41722, "disrespectful": 41723, "smallbiz": 41724, "lox": 41725, "coom": 41726, "csc": 41727, "bsbi": 41728, "prevalence": 41729, "himss": 41730, "espan": 41731, "moga": 41732, "frampton": 41733, "skymap": 41734, "masse": 41735, "leviathan": 41736, "().": 41737, "nocturnal": 41738, "carameli": 41739, "angor": 41740, "amnesia": 41741, "outsiders": 41742, "shealth": 41743, "rhino": 41744, "antag": 41745, "agio": 41746, "ðŁē°ðŁē°": 41747, "takeme": 41748, "kabaddi": 41749, "csi": 41750, "msh": 41751, "cochrane": 41752, "thessaloni": 41753, "sila": 41754, "haus": 41755, "dusting": 41756, "obese": 41757, "macklemore": 41758, "manish": 41759, "lenin": 41760, "mdc": 41761, "grown": 41762, "sheffield": 41763, "srs": 41764, "kele": 41765, "carson": 41766, "chum": 41767, "dahlia": 41768, "cantore": 41769, "oppo": 41770, "howling": 41771, "cybercrime": 41772, "surrealism": 41773, "scran": 41774, "faiz": 41775, "thren": 41776, "racists": 41777, "rout": 41778, "pknot": 41779, "semana": 41780, "sini": 41781, "mccull": 41782, "machi": 41783, "alfonso": 41784, "yb": 41785, "sardar": 41786, "kendrick": 41787, "deng": 41788, "recipro": 41789, "onf": 41790, "doomsday": 41791, "bribery": 41792, "customiz": 41793, "artis": 41794, "cpi": 41795, "ðŁĻĪðŁĻĪ": 41796, "slava": 41797, "lette": 41798, "ens": 41799, "âĿ¤ï¸ıðŁĺĺ": 41800, "crayon": 41801, "adan": 41802, "trc": 41803, "migrate": 41804, "simpson": 41805, "rowers": 41806, "kingsley": 41807, "farmersmarket": 41808, "sheehan": 41809, "nephe": 41810, "bornon": 41811, "carton": 41812, "mickey": 41813, "allure": 41814, "ulu": 41815, "slipknot": 41816, "hebdo": 41817, "guido": 41818, "dogcelebration": 41819, "onlinemarketing": 41820, "accelerating": 41821, ")..": 41822, "originated": 41823, "macaroni": 41824, "edtech": 41825, "outfield": 41826, "mitz": 41827, "discus": 41828, "advertiser": 41829, "manor": 41830, "hashi": 
41831, "descrip": 41832, "capita": 41833, "fulbright": 41834, "receptor": 41835, "conn": 41836, "coney": 41837, "spionage": 41838, "rattle": 41839, "prest": 41840, "uli": 41841, "blogpost": 41842, "ackeray": 41843, ")â̦": 41844, "redvelvet": 41845, "matth": 41846, "inspiring": 41847, "bsd": 41848, "kerri": 41849, "pocon": 41850, "millar": 41851, "repur": 41852, "accenture": 41853, "ä¹": 41854, "rambo": 41855, "ragnarok": 41856, "deleting": 41857, "britishmuseum": 41858, "patory": 41859, "leipzig": 41860, "florian": 41861, "scifi": 41862, "iners": 41863, "brate": 41864, "yoy": 41865, "melissa": 41866, "aber": 41867, "masa": 41868, "pote": 41869, "mosquitoes": 41870, "transplant": 41871, "rpa": 41872, ";))": 41873, "bastille": 41874, "ylan": 41875, "joyeux": 41876, "melodic": 41877, "captions": 41878, "atrist": 41879, "rochdale": 41880, "gotti": 41881, "pewdie": 41882, "cutiesaturday": 41883, "whois": 41884, "aquaculture": 41885, "tiva": 41886, "spel": 41887, "hess": 41888, "haji": 41889, "freddie": 41890, "coper": 41891, "brando": 41892, "vk": 41893, "photobook": 41894, "*,": 41895, "mydayin": 41896, "michaela": 41897, "brunei": 41898, "srini": 41899, "inte": 41900, "ı": 41901, "deol": 41902, "dfc": 41903, "separately": 41904, "bund": 41905, "vests": 41906, "toc": 41907, "meck": 41908, "reinforced": 41909, "constraints": 41910, "carroll": 41911, "sqft": 41912, "rever": 41913, "camper": 41914, "birdman": 41915, "inaction": 41916, "generators": 41917, "triumphant": 41918, "pests": 41919, "ovo": 41920, "gypt": 41921, "alamo": 41922, "scaled": 41923, "sureshpp": 41924, "sdn": 41925, "ismo": 41926, "gios": 41927, ")@": 41928, "justiceleague": 41929, "restaurant": 41930, "gabi": 41931, "dengue": 41932, "nextgen": 41933, "exempli": 41934, "apex": 41935, "inspirational": 41936, "downside": 41937, "kidz": 41938, "upl": 41939, "etna": 41940, "alvaro": 41941, "feldman": 41942, "barnet": 41943, "mha": 41944, "esch": 41945, "blooded": 41946, ">>>>>>>>": 41947, "kani": 41948, 
"hofficial": 41949, "casablanca": 41950, "birds": 41951, "tyga": 41952, "swamp": 41953, "oday": 41954, "newcastle": 41955, "nbap": 41956, "cision": 41957, "chools": 41958, "aflo": 41959, "nep": 41960, "monton": 41961, "akb": 41962, "supermodel": 41963, "downtime": 41964, "thos": 41965, "scwx": 41966, "snoopy": 41967, "aggreg": 41968, "yoke": 41969, "norcal": 41970, "wett": 41971, "prolonged": 41972, "metast": 41973, "beater": 41974, "fta": 41975, "tlap": 41976, "disgusted": 41977, "yh": 41978, "voiceover": 41979, "itchy": 41980, "ipc": 41981, "ðŁİ¾": 41982, "pheasant": 41983, "straits": 41984, "rampant": 41985, "jg": 41986, "fertil": 41987, "assures": 41988, "fortunes": 41989, "salinas": 41990, "lizards": 41991, "kettle": 41992, "ibs": 41993, "cynthi": 41994, "heg": 41995, "mccr": 41996, "socceroos": 41997, "happenings": 41998, "corden": 41999, "ðŁĺĤðŁijĮ": 42000, "tches": 42001, "egret": 42002, "wolverines": 42003, "congratulated": 42004, "hogg": 42005, "bottling": 42006, "wri": 42007, "ferri": 42008, "bosch": 42009, "afire": 42010, "ogden": 42011, "sjo": 42012, "jdm": 42013, "svt": 42014, "contex": 42015, "tollywood": 42016, "mink": 42017, "mese": 42018, "supersonic": 42019, "opoulos": 42020, "ĸ": 42021, "âĶģ": 42022, "knuckle": 42023, "guise": 42024, "gami": 42025, "chucky": 42026, "zinger": 42027, "radial": 42028, "complained": 42029, "boda": 42030, "fetal": 42031, "disciplines": 42032, "corro": 42033, "ðŁĩ®ðŁĩ¹": 42034, "opted": 42035, "filtration": 42036, "adnan": 42037, "emcee": 42038, "mistre": 42039, "insomni": 42040, "fergus": 42041, "trajec": 42042, "ondon": 42043, "medtech": 42044, "tangerine": 42045, "madras": 42046, "grue": 42047, "cabs": 42048, "zhu": 42049, "sureshpprabhu": 42050, "insulated": 42051, "dayswild": 42052, "ppm": 42053, "bandai": 42054, "vday": 42055, "sff": 42056, "squid": 42057, "lothing": 42058, "notdead": 42059, "expressive": 42060, "cull": 42061, "alastair": 42062, "xu": 42063, "upfront": 42064, "fishers": 42065, "enes": 42066, 
"umd": 42067, "dismissal": 42068, "stier": 42069, "sels": 42070, "lust": 42071, "reactive": 42072, "protester": 42073, "eyelashes": 42074, "alim": 42075, "goode": 42076, "greeng": 42077, "dair": 42078, "compen": 42079, "anushka": 42080, "prototyping": 42081, "mapu": 42082, "bearings": 42083, "ðŁIJŁ": 42084, "forme": 42085, "bsbibotany": 42086, "timothy": 42087, "outskirts": 42088, "ambed": 42089, "aretha": 42090, "wendell": 42091, "streaks": 42092, "nim": 42093, "kpk": 42094, "snee": 42095, "fitter": 42096, "quota": 42097, "pate": 42098, "winning": 42099, "ðŁįŃ": 42100, "shopping": 42101, "mainst": 42102, "culver": 42103, "stevie": 42104, "mcfadden": 42105, "counterparts": 42106, "grenfell": 42107, "folsom": 42108, "dorset": 42109, "techcrunch": 42110, "â¬ħï¸ı": 42111, "tiptuesday": 42112, "usl": 42113, "trex": 42114, "georgie": 42115, "ranveerofficial": 42116, "licks": 42117, "sewn": 42118, "kf": 42119, "'â̦": 42120, "japs": 42121, "pate": 42122, "orthop": 42123, "festa": 42124, "stras": 42125, "montal": 42126, "hammersmith": 42127, "foremost": 42128, "widows": 42129, "madre": 42130, "itez": 42131, "mitochondri": 42132, "ligans": 42133, "zona": 42134, "caribou": 42135, "mss": 42136, "andrei": 42137, "weatherchannel": 42138, "ghc": 42139, ":...": 42140, "taft": 42141, "aweather": 42142, "alisation": 42143, "brutal": 42144, "blissful": 42145, "nikola": 42146, "malicious": 42147, "qm": 42148, "mpgvip": 42149, "brodie": 42150, "blitz": 42151, "applaud": 42152, "dribb": 42153, "vague": 42154, "doggo": 42155, "translating": 42156, "interpreted": 42157, "hatched": 42158, "getyour": 42159, "beneficiaries": 42160, "sparring": 42161, "caesars": 42162, "awilliams": 42163, "lahat": 42164, "broke": 42165, "timp": 42166, "virtues": 42167, "relying": 42168, "pietro": 42169, "ktn": 42170, "icists": 42171, "pablo": 42172, "loui": 42173, "aag": 42174, "pnpp": 42175, "chast": 42176, "pulses": 42177, "finish": 42178, "usairforce": 42179, "typewriter": 42180, "thompson": 42181, 
"dogs": 42182, "utto": 42183, "ãģį": 42184, "sandal": 42185, "newly": 42186, "doge": 42187, "zw": 42188, "wankers": 42189, "negr": 42190, "mucha": 42191, "determines": 42192, "blackfish": 42193, "skunk": 42194, "mups": 42195, "instrument": 42196, "phyto": 42197, "daystogo": 42198, "skinned": 42199, "haider": 42200, "conten": 42201, "ðŁIJ¾ðŁIJ¾": 42202, "weiler": 42203, "undoubtedly": 42204, "chairing": 42205, "wallis": 42206, "shard": 42207, "zindabad": 42208, "adult": 42209, "absorption": 42210, "presto": 42211, "deploying": 42212, "drummond": 42213, "battlefront": 42214, "seagulls": 42215, "howdy": 42216, "judaism": 42217, "desde": 42218, "partition": 42219, "âľĿ": 42220, "nology": 42221, "nationalbestfriend": 42222, "lesnar": 42223, "filmfare": 42224, "coasts": 42225, "christensen": 42226, "acan": 42227, "mbu": 42228, "copped": 42229, "rubble": 42230, "swc": 42231, "funnier": 42232, "farther": 42233, "whereas": 42234, "nanotechnology": 42235, "withstand": 42236, "pillow": 42237, "bowers": 42238, "tope": 42239, "itly": 42240, "confit": 42241, "makar": 42242, "comforts": 42243, "bosh": 42244, "clipper": 42245, "balla": 42246, "stik": 42247, "milb": 42248, "safeguard": 42249, "musique": 42250, "easport": 42251, "yaz": 42252, "padded": 42253, "bader": 42254, "foreign": 42255, "chopin": 42256, "archive": 42257, "oka": 42258, "transporting": 42259, "tmltalk": 42260, "ajit": 42261, "consequence": 42262, "scroo": 42263, "ffo": 42264, "collaborated": 42265, "pugchat": 42266, "yemi": 42267, "javed": 42268, "auburn": 42269, "oof": 42270, "maw": 42271, "saucer": 42272, "mitigate": 42273, "iles": 42274, "evangelist": 42275, "terie": 42276, "recl": 42277, "indictment": 42278, "cata": 42279, "brightness": 42280, "maythe": 42281, "whimsical": 42282, "unlv": 42283, "keyword": 42284, "cumin": 42285, "medway": 42286, "westworld": 42287, "traw": 42288, "imposing": 42289, "formity": 42290, "coulter": 42291, "abz": 42292, "nypd": 42293, "grassi": 42294, "kelsey": 42295, "qldpol": 
42296, "clockwork": 42297, "fdr": 42298, "dianne": 42299, "âĺij": 42300, "adh": 42301, "pann": 42302, "bravely": 42303, "aege": 42304, "unlawful": 42305, "verdi": 42306, "pocalypse": 42307, "pharo": 42308, "karla": 42309, "resonance": 42310, "mastiff": 42311, "ladak": 42312, "buu": 42313, "mailed": 42314, "hii": 42315, "crawley": 42316, "torrent": 42317, "machado": 42318, "libyan": 42319, "effortlessly": 42320, "falsely": 42321, "qvist": 42322, "keef": 42323, "crafthour": 42324, "cherished": 42325, "valkyrie": 42326, "sari": 42327, "kalamaz": 42328, "behe": 42329, "ðŁĮĻ": 42330, "thim": 42331, "roddy": 42332, "coltrane": 42333, "butchers": 42334, "achim": 42335, "wkend": 42336, "awkward": 42337, "cabrera": 42338, ":))))": 42339, "franc": 42340, "declan": 42341, "condos": 42342, "aja": 42343, "pandoramusic": 42344, "charter": 42345, "phill": 42346, "montrose": 42347, "hatchback": 42348, "handicapp": 42349, "greaves": 42350, "eucalyptus": 42351, "utmost": 42352, "tson": 42353, "burton": 42354, "midwives": 42355, "incur": 42356, "ðŁĺį#": 42357, "mood": 42358, "compressed": 42359, "toma": 42360, "mustang": 42361, "mog": 42362, "asana": 42363, "testic": 42364, "shotel": 42365, "insol": 42366, "corsair": 42367, "nhq": 42368, "benny": 42369, "smma": 42370, "kapur": 42371, "incon": 42372, "jonas": 42373, "energies": 42374, "donal": 42375, "asad": 42376, "sez": 42377, "npa": 42378, "archived": 42379, "stimulate": 42380, "dop": 42381, "hyd": 42382, "grieving": 42383, "ãĄĪ": 42384, "rona": 42385, "whyte": 42386, "treehouse": 42387, "ssell": 42388, "sandro": 42389, "kobo": 42390, "thermost": 42391, "seclu": 42392, "hiya": 42393, "geez": 42394, "mamas": 42395, "priscilla": 42396, "flavoured": 42397, "fass": 42398, "wold": 42399, "makerspace": 42400, "cosplay": 42401, "ptv": 42402, "happyvalentinesday": 42403, "sequoia": 42404, "lovecraft": 42405, "guan": 42406, "dtm": 42407, "cii": 42408, "yokohama": 42409, "posthum": 42410, "req": 42411, "ðŁĶµâļªï¸ı": 42412, "galatasar": 
42413, "dolby": 42414, "hamptons": 42415, "disturbance": 42416, "stonehenge": 42417, "okc": 42418, "disrupting": 42419, "monthsary": 42420, "jungle": 42421, "headlights": 42422, "dustin": 42423, "microsof": 42424, "happymothersday": 42425, "koko": 42426, "grazi": 42427, "testo": 42428, "naidu": 42429, "malay": 42430, "arial": 42431, "rumb": 42432, "aboo": 42433, "harman": 42434, "trape": 42435, "spoils": 42436, "jeho": 42437, "godly": 42438, "lockscreen": 42439, "zun": 42440, "pious": 42441, "magento": 42442, "lenders": 42443, "probable": 42444, "corporal": 42445, "mour": 42446, "awal": 42447, "sua": 42448, "callme": 42449, "tonne": 42450, "govin": 42451, "devastation": 42452, "xj": 42453, "gearbox": 42454, "warlock": 42455, "perme": 42456, "itate": 42457, "gazaunderattack": 42458, "duval": 42459, "parasite": 42460, "clemente": 42461, "leth": 42462, "iva": 42463, "frozen": 42464, "tholes": 42465, "tobin": 42466, "cairn": 42467, "sill": 42468, "luckiest": 42469, "converts": 42470, "stale": 42471, "pancra": 42472, "europale": 42473, "wisdom": 42474, "schur": 42475, "ì¶": 42476, "vertigo": 42477, "bij": 42478, "ubc": 42479, "nure": 42480, "righteousness": 42481, "mtc": 42482, "factory": 42483, "verst": 42484, "reversed": 42485, "huri": 42486, "heechul": 42487, "faber": 42488, "arr": 42489, "ulous": 42490, "venom": 42491, "phat": 42492, "greenery": 42493, "brady": 42494, "æ": 42495, ":((": 42496, "nevergiveup": 42497, "disha": 42498, "mota": 42499, "healthcare": 42500, "dunham": 42501, "dexpo": 42502, "denzel": 42503, "bbins": 42504, "fics": 42505, "wham": 42506, "mcg": 42507, "elian": 42508, "wata": 42509, "stralia": 42510, "tellu": 42511, "pesky": 42512, "spinoff": 42513, "armoured": 42514, "reacted": 42515, "dofficial": 42516, "tedu": 42517, "sagar": 42518, "morally": 42519, "paralleled": 42520, "fios": 42521, "downer": 42522, "daugh": 42523, "redo": 42524, "worldcup": 42525, "tariq": 42526, "barne": 42527, "glaciers": 42528, "occult": 42529, "barbarian": 42530, 
"hermosa": 42531, "!!!)": 42532, "yur": 42533, "internation": 42534, "pss": 42535, "situ": 42536, "pint": 42537, "americanair": 42538, "swam": 42539, "doppler": 42540, "ðŁēĻðŁēľ": 42541, "cincodemayo": 42542, "levan": 42543, "hellenic": 42544, "mcne": 42545, "judi": 42546, "yuh": 42547, "stx": 42548, "quare": 42549, "ðŁĺĤ.": 42550, "stig": 42551, "gels": 42552, "motley": 42553, "hardwork": 42554, "eurozone": 42555, "ead": 42556, "ç„Ń": 42557, "seabir": 42558, "cius": 42559, "laid": 42560, "alpaca": 42561, "presumably": 42562, "pewdiepie": 42563, "booted": 42564, "amari": 42565, "tamine": 42566, "solace": 42567, "barrow": 42568, "academies": 42569, "xian": 42570, "omination": 42571, "dungeons": 42572, "bma": 42573, "deity": 42574, "aik": 42575, "stabil": 42576, "hira": 42577, "affectionate": 42578, "vingne": 42579, "newport": 42580, "ãħĭãħĭ": 42581, "thirds": 42582, "retains": 42583, "aromatherapy": 42584, "skier": 42585, "nima": 42586, "dope": 42587, "cringe": 42588, "condomin": 42589, "toor": 42590, "animator": 42591, "saraj": 42592, "seascape": 42593, "minimalism": 42594, "lakeshore": 42595, "callaway": 42596, "bergman": 42597, "à¤Ĺ": 42598, "whispering": 42599, "stupid": 42600, "rightful": 42601, "requis": 42602, "irn": 42603, "seva": 42604, "utpol": 42605, "tuberculo": 42606, "squish": 42607, "debut": 42608, "governmental": 42609, "christine": 42610, "allman": 42611, "weapon": 42612, "sito": 42613, "buri": 42614, "lolita": 42615, "leafy": 42616, "fuch": 42617, "tinted": 42618, "mcken": 42619, "ahahaha": 42620, "ðŁĩµðŁĩ¹": 42621, "repeal": 42622, "negan": 42623, "ðŁķĬ": 42624, "tailgating": 42625, "gameinsight": 42626, "ðŁıŁï¸ı": 42627, "yakuza": 42628, "zt": 42629, "tiring": 42630, "proposing": 42631, "bowlers": 42632, "traitors": 42633, "akshi": 42634, "clergy": 42635, "cito": 42636, "upsets": 42637, "tuscal": 42638, "symphonic": 42639, "silently": 42640, "shuff": 42641, "blackwell": 42642, "ðŁĺĤ)": 42643, "kobe": 42644, "roberto": 42645, "ridg": 42646, "dcu": 
42647, "merino": 42648, "ftp": 42649, "eastside": 42650, ".~": 42651, "nbl": 42652, "mnleg": 42653, "tsfor": 42654, "fraudul": 42655, "capping": 42656, "inmy": 42657, "gymnast": 42658, "stones": 42659, "ssin": 42660, "tweaks": 42661, "shaggy": 42662, "oakland": 42663, "demsin": 42664, "sangria": 42665, "mmva": 42666, "hennessy": 42667, "downton": 42668, "rightly": 42669, "init": 42670, "agave": 42671, "oblast": 42672, "northeast": 42673, "friendship": 42674, "dala": 42675, "trophy": 42676, "ðŁij½": 42677, "magin": 42678, "margaritas": 42679, "ĆŖĀ·": 42680, "wwfc": 42681, "fash": 42682, "dike": 42683, "cud": 42684, "chart": 42685, "ðŁij®": 42686, "refugees": 42687, "joplin": 42688, "ncs": 42689, "impy": 42690, "firmware": 42691, "pascu": 42692, "flamin": 42693, "healthtech": 42694, "bellletstalk": 42695, "waka": 42696, "olls": 42697, "lago": 42698, "cowan": 42699, "bombardier": 42700, "shome": 42701, "ðŁĻħ": 42702, "mcmaster": 42703, "nave": 42704, "wells": 42705, "uta": 42706, "tellers": 42707, "misfits": 42708, "kapil": 42709, "faceoff": 42710, "affirm": 42711, "apro": 42712, "whitepaper": 42713, "superyacht": 42714, "specimens": 42715, "allocated": 42716, "...,": 42717, "-__": 42718, "kaw": 42719, "dachshund": 42720, "djoker": 42721, "swork": 42722, "quiere": 42723, "orum": 42724, "ðŁIJł": 42725, "somm": 42726, "cmt": 42727, "inghour": 42728, "skinny": 42729, "lgbti": 42730, "giggles": 42731, "breakaway": 42732, "researched": 42733, "parity": 42734, "myal": 42735, "msl": 42736, "retained": 42737, "sivity": 42738, "makeinindia": 42739, "solves": 42740, "defamation": 42741, "waltham": 42742, "sriracha": 42743, "roadway": 42744, "conceptu": 42745, "alin": 42746, "iwant": 42747, "ÄĪ": 42748, "delft": 42749, "tenderloin": 42750, "gains": 42751, "faults": 42752, "swire": 42753, "stellen": 42754, "pollo": 42755, "dyne": 42756, "bornonthisday": 42757, "asdfghj": 42758, "sql": 42759, "salim": 42760, "advises": 42761, "voip": 42762, "ìĹijìĨ": 42763, "untouched": 42764, 
"sheil": 42765, "ontario": 42766, "uphill": 42767, "sobre": 42768, "deshi": 42769, "novella": 42770, "dutton": 42771, "crawfish": 42772, "Ć˜Ā§Ć™ÄØ": 42773, "maa": 42774, "twine": 42775, "kalin": 42776, "ðŁĩµðŁĩŃ": 42777, "yess": 42778, "brooks": 42779, "hoosiers": 42780, "tonka": 42781, "umbrellas": 42782, "ayers": 42783, "ateam": 42784, "acquiring": 42785, "suction": 42786, "än": 42787, "wies": 42788, "tarians": 42789, "socio": 42790, "mattb": 42791, "shepherds": 42792, "oso": 42793, "charitytuesday": 42794, "slogans": 42795, "ninjas": 42796, "albat": 42797, "byte": 42798, "bashir": 42799, "trampoline": 42800, "mydayinla": 42801, "ija": 42802, "basel": 42803, "rory": 42804, "goldie": 42805, "firec": 42806, "unnoticed": 42807, "peculiar": 42808, "scha": 42809, "kerson": 42810, "mourns": 42811, "liquidity": 42812, "quipment": 42813, "hibs": 42814, "ars": 42815, "aeronau": 42816, "slideshow": 42817, "slabs": 42818, "deliciousness": 42819, "skitchen": 42820, "htafc": 42821, "fullerton": 42822, "creighton": 42823, "aerob": 42824, "procrastination": 42825, "azores": 42826, "whitehall": 42827, "ussoccer": 42828, "mediation": 42829, "djokernole": 42830, "andme": 42831, "umen": 42832, "noxious": 42833, "joss": 42834, "ilife": 42835, "annivers": 42836, "sudanese": 42837, "etres": 42838, "undermine": 42839, "wholefoods": 42840, "disobe": 42841, "kori": 42842, "adele": 42843, "eliz": 42844, "canti": 42845, "alon": 42846, "gymnasium": 42847, "sarkodie": 42848, "meteorologist": 42849, "ylde": 42850, "steen": 42851, "stampcollecting": 42852, "nasal": 42853, "lott": 42854, "franks": 42855, "exol": 42856, "acki": 42857, "goodyear": 42858, "animalrights": 42859, "yles": 42860, "violets": 42861, "mmes": 42862, "sthel": 42863, "rapping": 42864, "tuscan": 42865, "waiver": 42866, "turner": 42867, "eatlocal": 42868, "northeasthour": 42869, "animations": 42870, "tommorow": 42871, "tsh": 42872, "ffame": 42873, "brae": 42874, "petron": 42875, "glamour": 42876, "bryn": 42877, "dcs": 42878, 
"bales": 42879, "ðŁĶ¶": 42880, "brov": 42881, "brev": 42882, "bons": 42883, "physique": 42884, "carne": 42885, "xe": 42886, "elixir": 42887, "volved": 42888, "loma": 42889, "ìľł": 42890, "æĺ": 42891, "vanu": 42892, "rigs": 42893, "balance": 42894, "vares": 42895, "bonita": 42896, "sprinkle": 42897, "perfecto": 42898, "dion": 42899, "leak": 42900, "calcutta": 42901, "oba": 42902, "dma": 42903, "cmon": 42904, "tuner": 42905, "pneumonia": 42906, "bogus": 42907, "apologe": 42908, "clough": 42909, "borne": 42910, "))))": 42911, "revived": 42912, "ovarian": 42913, "nerf": 42914, "clegg": 42915, "fanfest": 42916, "chou": 42917, "realizes": 42918, "mcn": 42919, "ligu": 42920, "legalize": 42921, "justsaying": 42922, "forster": 42923, "bosni": 42924, "khi": 42925, "indom": 42926, "heidel": 42927, "encryp": 42928, "siss": 42929, "eddi": 42930, "marbles": 42931, "brisbane": 42932, "ying": 42933, "prepaid": 42934, "walsall": 42935, "cooperate": 42936, "orchestr": 42937, "marisa": 42938, "howie": 42939, "chewy": 42940, "brenner": 42941, "andromeda": 42942, "egan": 42943, "stocki": 42944, "cavendish": 42945, "agan": 42946, "bano": 42947, "deir": 42948, "gog": 42949, "blk": 42950, "rethinking": 42951, "chig": 42952, "rheu": 42953, "snip": 42954, "peng": 42955, "seminole": 42956, "mswx": 42957, "annex": 42958, "lynda": 42959, "lewishamilton": 42960, "cumul": 42961, "tbl": 42962, "dolphin": 42963, "aguero": 42964, "............": 42965, "prelude": 42966, "atour": 42967, "granger": 42968, "tooting": 42969, "rotun": 42970, "disar": 42971, "homeitems": 42972, "dares": 42973, "********": 42974, "ðŁijĨ": 42975, "compreh": 42976, "jinx": 42977, "aswell": 42978, "irie": 42979, "circulating": 42980, "ðŁIJ„": 42981, "overboard": 42982, "cultivate": 42983, "rhett": 42984, "orienteering": 42985, "cak": 42986, "balkans": 42987, "sitt": 42988, "jasmin": 42989, "britneyspears": 42990, "rotor": 42991, "sealing": 42992, "gbc": 42993, "occi": 42994, "fas": 42995, "emancip": 42996, "comer": 42997, 
"wartime": 42998, "tickle": 42999, "sonny": 43000, "paces": 43001, "logg": 43002, "atrix": 43003, "srp": 43004, "gwin": 43005, "dobbs": 43006, "uzbe": 43007, "thewanted": 43008, "drush": 43009, "extru": 43010, "micky": 43011, "honorees": 43012, "darwin": 43013, "redux": 43014, "mmj": 43015, "rami": 43016, "jalapeño": 43017, "ioc": 43018, "dover": 43019, "juju": 43020, "whitney": 43021, "seng": 43022, "enly": 43023, "auch": 43024, "archipelago": 43025, "vigilant": 43026, "mangal": 43027, "wildest": 43028, "paranoid": 43029, "hali": 43030, "bbly": 43031, "sanctioned": 43032, "realms": 43033, "conco": 43034, "uddin": 43035, "csk": 43036, "playtime": 43037, "libra": 43038, "savag": 43039, "octane": 43040, "rectan": 43041, "return": 43042, "parrish": 43043, "morrha": 43044, "ccp": 43045, "cmu": 43046, "sailed": 43047, "sevent": 43048, "rosie": 43049, "piling": 43050, "hew": 43051, "boarded": 43052, "segments": 43053, "nephro": 43054, "(.": 43055, "crats": 43056, "bakes": 43057, "ðŁį¸": 43058, "backtothe": 43059, "sibling": 43060, "kirkland": 43061, "keo": 43062, "guwa": 43063, "breads": 43064, "ðŁĺľðŁĺľ": 43065, "tq": 43066, "harassed": 43067, "gau": 43068, "wilbur": 43069, "jisoo": 43070, "eper": 43071, "lisam": 43072, "trippin": 43073, "shino": 43074, "rukh": 43075, "beastmode": 43076, "choa": 43077, "instaweather": 43078, "richland": 43079, "gari": 43080, "fez": 43081, "cowboysnation": 43082, "fursuit": 43083, "krun": 43084, "aen": 43085, "sycamore": 43086, "segun": 43087, "entennial": 43088, "dih": 43089, "oax": 43090, "demsinphilly": 43091, "ðŁĻĢ": 43092, "snhl": 43093, "pennies": 43094, "passwords": 43095, "makin": 43096, "tye": 43097, "deng": 43098, "knigh": 43099, "jeeplife": 43100, "helpline": 43101, "afor": 43102, "zzzz": 43103, "steamy": 43104, "picker": 43105, "iterate": 43106, "happeningnow": 43107, "kib": 43108, "bloomberg": 43109, "martyrdom": 43110, "bully": 43111, "assortment": 43112, "ahora": 43113, "zoe": 43114, "noi": 43115, "illustri": 43116, 
"agarwal": 43117, "psc": 43118, "electronica": 43119, "recruiter": 43120, "gardiner": 43121, "radha": 43122, "nafta": 43123, "dotnet": 43124, "piero": 43125, "georg": 43126, "bels": 43127, "ðŁĺĤðŁĺį": 43128, "tuberculosis": 43129, "runnin": 43130, "moris": 43131, "hauling": 43132, "evoc": 43133, "brethren": 43134, "shair": 43135, "frameworks": 43136, "astu": 43137, "rigid": 43138, "kuma": 43139, "kreme": 43140, "jinnah": 43141, "insurers": 43142, "nyu": 43143, "fere": 43144, "nollywood": 43145, "goodvibes": 43146, "-...": 43147, "toile": 43148, "skril": 43149, "instaweatherpro": 43150, "czech": 43151, "pavel": 43152, "onepiece": 43153, "nikeplus": 43154, "filet": 43155, "cavity": 43156, "ðŁı½âĢįâĻĤï¸ı": 43157, "ðŁİ£": 43158, "drastic": 43159, "dailys": 43160, "siamese": 43161, "rebu": 43162, "osteo": 43163, "lark": 43164, "fre": 43165, "shelling": 43166, "pé": 43167, "gladys": 43168, "ðŁıĢðŁıĢ": 43169, "gustave": 43170, "submerged": 43171, "grandstand": 43172, "attu": 43173, "wont": 43174, "fpv": 43175, "bley": 43176, "joni": 43177, "angames": 43178, "weighted": 43179, "alou": 43180, "श": 43181, "lesbians": 43182, "fj": 43183, "annies": 43184, "aml": 43185, "doria": 43186, "davin": 43187, "beta": 43188, "canc": 43189, "madewithunity": 43190, "haj": 43191, "badlands": 43192, "mul": 43193, "bluec": 43194, "pawn": 43195, "covington": 43196, "neurology": 43197, "httweets": 43198, "dyslexia": 43199, "thelove": 43200, "neat": 43201, "forklift": 43202, "automate": 43203, "uneven": 43204, "montess": 43205, "hein": 43206, "hag": 43207, "relics": 43208, "competitiveness": 43209, "canelo": 43210, "martens": 43211, "bulletproof": 43212, "skittles": 43213, "gya": 43214, "primo": 43215, "americafirst": 43216, "wooo": 43217, "abortions": 43218, "??!!": 43219, "mache": 43220, "lders": 43221, "rlly": 43222, "prelims": 43223, "direct": 43224, "course": 43225, "swain": 43226, "supercell": 43227, "eccentric": 43228, "stingray": 43229, "plets": 43230, "wilcox": 43231, "westin": 43232, 
"okanagan": 43233, "kiran": 43234, "carbo": 43235, "bombings": 43236, "rarest": 43237, "boh": 43238, "gawd": 43239, "digg": 43240, "moana": 43241, "entirety": 43242, "enclosed": 43243, "dodgeball": 43244, "parton": 43245, "milkyway": 43246, "atr": 43247, "thoroughbred": 43248, "really": 43249, "qantas": 43250, "epiphany": 43251, "inee": 43252, "aerosmith": 43253, "spieth": 43254, "arthro": 43255, "ellini": 43256, "dubu": 43257, "braving": 43258, "âļ½âļ½": 43259, "restructuring": 43260, "illuminate": 43261, "equili": 43262, "mpi": 43263, "ashton": 43264, "ponytail": 43265, "mascots": 43266, "flattering": 43267, "crum": 43268, "asta": 43269, "à®°": 43270, "strangerthings": 43271, "barnab": 43272, "Ć˜Ā±Ć™Ä¬": 43273, "makeshift": 43274, "gotcha": 43275, "willam": 43276, "choirs": 43277, "kilometres": 43278, "ghosh": 43279, "euthan": 43280, "dolly": 43281, "unning": 43282, "thear": 43283, "crewe": 43284, "wsw": 43285, "jace": 43286, "dismiss": 43287, "kean": 43288, "hota": 43289, "khat": 43290, "~>": 43291, "thiru": 43292, "rendez": 43293, "hartman": 43294, "teessi": 43295, "casca": 43296, "zah": 43297, "hydrange": 43298, "fod": 43299, "awp": 43300, "mzansi": 43301, "thicker": 43302, "nagoya": 43303, "neva": 43304, "stique": 43305, "castel": 43306, "damian": 43307, "thereby": 43308, "jiang": 43309, "alek": 43310, "musicislife": 43311, "raq": 43312, "callahan": 43313, "gouache": 43314, "somaliland": 43315, "seanhannity": 43316, "raheem": 43317, "lose": 43318, "elove": 43319, "wharton": 43320, "rectangular": 43321, "illustrating": 43322, "harne": 43323, "autisma": 43324, "scrapped": 43325, "elland": 43326, "decree": 43327, "nagpur": 43328, "kipp": 43329, "sore": 43330, "nmd": 43331, "maas": 43332, "guna": 43333, "gartner": 43334, "belli": 43335, "thenight": 43336, "jeon": 43337, "genderequality": 43338, "giver": 43339, "ael": 43340, "garments": 43341, "neu": 43342, "mardigras": 43343, "marsden": 43344, "rower": 43345, "polluted": 43346, "cameraman": 43347, "vinod": 43348, 
"beasley": 43349, "croc": 43350, "jiu": 43351, "hollyoaks": 43352, "anesthesia": 43353, "alles": 43354, "steward": 43355, "latimes": 43356, "ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸": 43357, "tician": 43358, "goria": 43359, "comedic": 43360, "ð٤Ķð٤Ķð٤Ķ": 43361, "naive": 43362, "slions": 43363, "łĪ": 43364, "burglar": 43365, "ðŁĺŃðŁĺŃðŁĺŃðŁĺŃðŁĺŃ": 43366, "yorkshi": 43367, "señ": 43368, "fanboy": 43369, "laurel": 43370, "incidence": 43371, "potomac": 43372, "roberta": 43373, "presiden": 43374, "pryor": 43375, "osbourne": 43376, "wku": 43377, "teme": 43378, "palae": 43379, "ðŁ„º": 43380, "reboun": 43381, "itude": 43382, "reddish": 43383, "khand": 43384, "colonialism": 43385, "northcarolina": 43386, "ðĿē": 43387, "mannequin": 43388, "ladybird": 43389, "tasty": 43390, "knowledgeable": 43391, "gshore": 43392, "ðŁĮĮ": 43393, "ன": 43394, "quaker": 43395, "salzburg": 43396, "medalists": 43397, "chyna": 43398, "bridesmaid": 43399, "maori": 43400, "rop": 43401, "outraged": 43402, "inadequate": 43403, "truckers": 43404, "alana": 43405, "ìĿ¼": 43406, "rix": 43407, "oooooooo": 43408, "commandments": 43409, "lambeth": 43410, "aaj": 43411, "ecofriendly": 43412, "blaz": 43413, "morecambe": 43414, "bouncy": 43415, "roux": 43416, "raided": 43417, "mized": 43418, "shc": 43419, "gawx": 43420, "laboratories": 43421, "rubs": 43422, "restroom": 43423, "consultations": 43424, "cajun": 43425, "virgini": 43426, "soir": 43427, "revue": 43428, "plein": 43429, "wager": 43430, "ç¹": 43431, "wedo": 43432, "growingup": 43433, "!ðŁĺĬ": 43434, "faceted": 43435, "sinners": 43436, "hovering": 43437, "tiene": 43438, "seasoning": 43439, "anja": 43440, "leggo": 43441, "ilis": 43442, "flax": 43443, "devo": 43444, "ashram": 43445, "matisse": 43446, "keri": 43447, "gower": 43448, "botox": 43449, "marshes": 43450, "unhcr": 43451, "tsm": 43452, "optimus": 43453, "duni": 43454, "stuffs": 43455, "sok": 43456, "orderly": 43457, "nbad": 43458, "islamophobia": 43459, "ravioli": 43460, "faber": 43461, "creds": 43462, "wonka": 
43463, "infusion": 43464, "overweight": 43465, "dailynews": 43466, "assimil": 43467, "acollege": 43468, "medallion": 43469, "kilimanjaro": 43470, "stiff": 43471, "thames": 43472, "sunken": 43473, "thard": 43474, "mydubai": 43475, "hilariously": 43476, "hannel": 43477, "plumber": 43478, "fairview": 43479, "separating": 43480, "rascal": 43481, "quien": 43482, "necessities": 43483, "confederation": 43484, "llll": 43485, ":]": 43486, "weaknesses": 43487, "bronco": 43488, "raffles": 43489, "elot": 43490, "ãĤ¸ãĄ": 43491, "adventcalendar": 43492, "ðŁİ¹": 43493, "stravel": 43494, "tunic": 43495, "ksu": 43496, "impeach": 43497, "espionage": 43498, "!-": 43499, "diment": 43500, "currant": 43501, "biode": 43502, "commuting": 43503, "byron": 43504, "ðŁēĵðŁēĵ": 43505, "shaded": 43506, "truro": 43507, "crayons": 43508, "arne": 43509, "hsc": 43510, "freaked": 43511, "dramati": 43512, "fleek": 43513, "ucd": 43514, "marlborough": 43515, "^-": 43516, "crossings": 43517, "malo": 43518, "blackops": 43519, "binance": 43520, "choked": 43521, "cheney": 43522, "plo": 43523, "gestures": 43524, "valedic": 43525, "ryanair": 43526, "remington": 43527, "vcs": 43528, "mckee": 43529, "ecz": 43530, "begs": 43531, "nailart": 43532, "mayorof": 43533, "happyfathersday": 43534, "wart": 43535, "petitions": 43536, "ningly": 43537, "cleanenergy": 43538, "brox": 43539, "slalom": 43540, "existent": 43541, "abay": 43542, "ugliest": 43543, "tomp": 43544, "stoma": 43545, "selby": 43546, "goalscorer": 43547, "benji": 43548, "overwhelmingly": 43549, "lans": 43550, "semiconductor": 43551, "southkorea": 43552, "rescheduled": 43553, "skyl": 43554, "enlisted": 43555, "dowski": 43556, "sidel": 43557, "rosenberg": 43558, "nasser": 43559, "whitehead": 43560, "prius": 43561, "harare": 43562, "enn": 43563, "ryder": 43564, "ƭĤ": 43565, "mong": 43566, "clasico": 43567, "transporter": 43568, "potty": 43569, "isme": 43570, "*****": 43571, "vice": 43572, "skit": 43573, "odessa": 43574, "lmp": 43575, "hern": 43576, 
"racially": 43577, "pinoy": 43578, "paraguay": 43579, "obituary": 43580, "goes": 43581, "bucha": 43582, "sidewalks": 43583, "angular": 43584, "unconstitutional": 43585, "transitioning": 43586, "ibu": 43587, "guys": 43588, "unpacking": 43589, "oooooo": 43590, "blackgirl": 43591, "bergs": 43592, "¯": 43593, "wordoftheday": 43594, "trumptrain": 43595, "thunderbolt": 43596, "msi": 43597, "fascists": 43598, "ब": 43599, "tsk": 43600, "collapses": 43601, "rajesh": 43602, "loveislove": 43603, "migrating": 43604, "setback": 43605, "ðŁĺĬâĿ¤ï¸ı": 43606, "tels": 43607, "safetyfirst": 43608, "narrated": 43609, "jaejoong": 43610, "unanswered": 43611, "liqueur": 43612, "ennes": 43613, "dalgo": 43614, "billings": 43615, "saltwater": 43616, "mermaids": 43617, "longs": 43618, "clapham": 43619, "wearec": 43620, "piccollage": 43621, "nach": 43622, "hace": 43623, "poisoned": 43624, "loth": 43625, "agna": 43626, "adelrey": 43627, "guardia": 43628, "polishing": 43629, "peacekeeping": 43630, "dall": 43631, "pisa": 43632, "lapland": 43633, "processors": 43634, "deandre": 43635, "sobs": 43636, "ponce": 43637, "drains": 43638, "cbe": 43639, "ðŁİ„:": 43640, "splash": 43641, "meatball": 43642, "fontana": 43643, "worcestershirehour": 43644, "nev": 43645, "brisk": 43646, "bint": 43647, "acr": 43648, "pox": 43649, "cayenne": 43650, "skrillex": 43651, "jfc": 43652, "hahahahahahaha": 43653, "glas": 43654, "engul": 43655, "temporal": 43656, "onized": 43657, "concre": 43658, "compose": 43659, "vibrations": 43660, "planters": 43661, "fert": 43662, "criticalrolefanart": 43663, "tbli": 43664, "schallenge": 43665, "huckabee": 43666, "municipal": 43667, "iambic": 43668, "radios": 43669, "nevis": 43670, "durability": 43671, "mccla": 43672, "horseback": 43673, "institutes": 43674, "fulfill": 43675, "attach": 43676, "ateur": 43677, "akan": 43678, "resisting": 43679, "illumination": 43680, "handle": 43681, "haircare": 43682, "oment": 43683, "macleod": 43684, "kaiser": 43685, "gno": 43686, "beardown": 43687, 
"lyf": 43688, "glomer": 43689, "distortion": 43690, "zm": 43691, "sank": 43692, "roosters": 43693, "isnow": 43694, "asports": 43695, "agen": 43696, "woken": 43697, "stgeorge": 43698, "romper": 43699, "myle": 43700, "economists": 43701, "ruto": 43702, "twill": 43703, "healthand": 43704, "dito": 43705, "wsl": 43706, "tairp": 43707, "prakash": 43708, "micheal": 43709, "hts": 43710, "wrights": 43711, "katsu": 43712, "fiorentina": 43713, "defenseman": 43714, "ditch": 43715, "varsity": 43716, "texanscheer": 43717, "baham": 43718, "scanned": 43719, "weil": 43720, "seductive": 43721, "ðŁijįðŁı½": 43722, "fue": 43723, "erwin": 43724, "davison": 43725, "terran": 43726, "moods": 43727, "woolf": 43728, "resource": 43729, "@.": 43730, "cush": 43731, "ðŁį°": 43732, "regression": 43733, "curled": 43734, "lazer": 43735, "joanne": 43736, "abbott": 43737, "moz": 43738, "downers": 43739, "mmmmmm": 43740, "valentina": 43741, "khair": 43742, "dreamt": 43743, "crook": 43744, "chek": 43745, "steaming": 43746, "nephews": 43747, "cleric": 43748, "asober": 43749, "indefinitely": 43750, "wye": 43751, "usnews": 43752, "joyce": 43753, "flushing": 43754, "wynonnaearp": 43755, "rondo": 43756, "kiss": 43757, "hotdog": 43758, "barns": 43759, "saxophon": 43760, "farley": 43761, "gasp": 43762, "decreasing": 43763, "alway": 43764, "pex": 43765, "lsd": 43766, "shift": 43767, "poutine": 43768, "razz": 43769, "rescuing": 43770, "niko": 43771, "hoch": 43772, "ccl": 43773, "uaap": 43774, "nts": 43775, "mcar": 43776, "ilwx": 43777, "conquering": 43778, "kettering": 43779, "sturdy": 43780, "delaying": 43781, "stok": 43782, "vanished": 43783, "cathar": 43784, "bingham": 43785, "inv": 43786, "ichiro": 43787, "hemo": 43788, "budgeting": 43789, "[...]": 43790, "bess": 43791, "sebastian": 43792, "slowed": 43793, "ðĿij": 43794, "muslim": 43795, "stuns": 43796, "actonclimate": 43797, "vea": 43798, "seton": 43799, "rosetta": 43800, "ount": 43801, "hardin": 43802, "fluid": 43803, "caw": 43804, "ðŁ„Ĥ": 43805, 
"yacht": 43806, "unl": 43807, "sphy": 43808, "provocative": 43809, "oric": 43810, "isback": 43811, "___": 43812, "nicolas": 43813, "gyan": 43814, "loose": 43815, "flin": 43816, "rebate": 43817, ":::": 43818, "!\"@": 43819, "comicon": 43820, "sheff": 43821, "downstream": 43822, "chichester": 43823, "beachlife": 43824, "momlife": 43825, "diabete": 43826, "arra": 43827, "vane": 43828, "oku": 43829, "yeo": 43830, "mango": 43831, "tryout": 43832, "appell": 43833, "heirs": 43834, "arjuna": 43835, "ddu": 43836, "naveen": 43837, "movic": 43838, "socialists": 43839, "sback": 43840, "criterion": 43841, "soyuz": 43842, "kher": 43843, "daz": 43844, "yolanda": 43845, "wineoclock": 43846, "reina": 43847, "onew": 43848, "leonard": 43849, "endez": 43850, "ubs": 43851, "supportlocal": 43852, "facilitated": 43853, "caramelized": 43854, "bpa": 43855, "vuelta": 43856, "mytho": 43857, "mami": 43858, "speare": 43859, "nbaplayoffs": 43860, "fevre": 43861, "nickjonas": 43862, "imprint": 43863, "cso": 43864, "craigslist": 43865, "lasalle": 43866, "gideon": 43867, "hadoop": 43868, "disregard": 43869, "wud": 43870, "tuc": 43871, "magee": 43872, "acoustics": 43873, "taa": 43874, "quie": 43875, "pola": 43876, "crt": 43877, "dwyer": 43878, "dissec": 43879, "capitol": 43880, "mention": 43881, "knoll": 43882, "heigh": 43883, "finders": 43884, "placements": 43885, "lse": 43886, "indira": 43887, "guri": 43888, "madhuridixit": 43889, "kingdoms": 43890, "iambicpent": 43891, "georgina": 43892, "jeky": 43893, "conflicting": 43894, "bayan": 43895, "agatha": 43896, "uphold": 43897, "dron": 43898, "vicar": 43899, "expat": 43900, "peripheral": 43901, "pessi": 43902, "faf": 43903, "ancestor": 43904, "?..": 43905, "widget": 43906, "punc": 43907, "commenced": 43908, "beavs": 43909, "airwaves": 43910, "addis": 43911, "poa": 43912, "desses": 43913, "coden": 43914, "vue": 43915, "rupee": 43916, "karin": 43917, "spock": 43918, "msy": 43919, "ะ": 43920, "prick": 43921, "fillmore": 43922, "tification": 43923, 
"thingsto": 43924, "sarde": 43925, "emile": 43926, "pereira": 43927, "nad": 43928, "brightening": 43929, "arresting": 43930, "woking": 43931, "uscg": 43932, "spill": 43933, "raspberrypi": 43934, "hugo": 43935, "itec": 43936, "isma": 43937, "cufflinks": 43938, "optimized": 43939, "occ": 43940, "miwx": 43941, "enka": 43942, "elited": 43943, "affordable": 43944, "sakh": 43945, "coronado": 43946, "hoh": 43947, "atul": 43948, "aioli": 43949, "jimcantore": 43950, "accounted": 43951, "vinay": 43952, "hermit": 43953, "grooves": 43954, "ranch": 43955, "rilla": 43956, "wetter": 43957, "outof": 43958, "veterin": 43959, "nikov": 43960, "kian": 43961, "fairbanks": 43962, "ramapho": 43963, "niti": 43964, "kko": 43965, "rusty": 43966, "nestle": 43967, "tvxq": 43968, "shaheer": 43969, "âĿ¤âĿ¤âĿ¤âĿ¤": 43970, "pennant": 43971, "gemstones": 43972, "demdebate": 43973, "ðŁIJĬ": 43974, "autonews": 43975, "supportindiefilm": 43976, "macho": 43977, "vex": 43978, "newsat": 43979, "neti": 43980, "concessions": 43981, "candied": 43982, "yofthe": 43983, "macau": 43984, "dends": 43985, "cricketers": 43986, "saniti": 43987, "mariano": 43988, "ghat": 43989, "artoftheday": 43990, "”ľ": 43991, "egos": 43992, "genoa": 43993, "chatbots": 43994, "brier": 43995, "allabout": 43996, "monty": 43997, "spied": 43998, "rtr": 43999, "comfort": 44000, "snippets": 44001, "realtime": 44002, "grain": 44003, "examined": 44004, "enlightening": 44005, "ttu": 44006, "godbless": 44007, "releasethe": 44008, "singular": 44009, "kians": 44010, "haka": 44011, "sorren": 44012, "defect": 44013, "marg": 44014, "equities": 44015, "dorian": 44016, "suka": 44017, "perl": 44018, "aishwarya": 44019, "pullover": 44020, "precision": 44021, "fairway": 44022, "neve": 44023, "riveting": 44024, "villanova": 44025, "encom": 44026, "ako": 44027, "passionately": 44028, "europaleague": 44029, "siempre": 44030, "xvi": 44031, "enlightened": 44032, "cfr": 44033, "âĺħâĺħâĺħâĺħ": 44034, "wasteland": 44035, "isf": 44036, "newcomers": 44037, 
"emergency": 44038, "amphitheatre": 44039, "-.": 44040, "textbooks": 44041, "figurative": 44042, "tremb": 44043, "pesc": 44044, "abhin": 44045, "abbot": 44046, "acacia": 44047, "hards": 44048, "porsche": 44049, "kauai": 44050, "elisa": 44051, "carrick": 44052, "abou": 44053, "ellier": 44054, "bech": 44055, "neutron": 44056, "galapagos": 44057, "ruben": 44058, "innis": 44059, "howto": 44060, "nuns": 44061, "sabine": 44062, "iac": 44063, "clinched": 44064, "notori": 44065, "fives": 44066, "cairngor": 44067, "peri": 44068, "grc": 44069, "ðŁē¯ðŁē¯": 44070, "malm": 44071, "twelfth": 44072, "diff": 44073, "routines": 44074, "martyn": 44075, "linden": 44076, "synthesizer": 44077, "number": 44078, "gamecube": 44079, "falkirk": 44080, "byzantine": 44081, "queuing": 44082, "grill": 44083, "scalable": 44084, "charred": 44085, "routing": 44086, "herbali": 44087, "grizz": 44088, "ðŁĺŃðŁĺŃðŁĺŃ": 44089, "toll": 44090, "terminals": 44091, "lpc": 44092, "abd": 44093, "warmups": 44094, "removable": 44095, "¯\\": 44096, "vigo": 44097, "papaya": 44098, "neve": 44099, "lovingly": 44100, "jokers": 44101, "ibles": 44102, "ssett": 44103, "potenti": 44104, "pele": 44105, "gigi": 44106, "sadiq": 44107, "legacy": 44108, "sono": 44109, "rupees": 44110, "retarded": 44111, "elee": 44112, "parr": 44113, "fiance": 44114, "eyre": 44115, "sayers": 44116, "pendants": 44117, "maknae": 44118, "albans": 44119, "adapting": 44120, "pff": 44121, "puberty": 44122, "jiu": 44123, "ingrad": 44124, "hypocrite": 44125, "diplomats": 44126, "physical": 44127, "robby": 44128, "bonsai": 44129, "ãģ·": 44130, "fatt": 44131, "catalunya": 44132, "âľĸï¸ı": 44133, "roma": 44134, "moreland": 44135, "soe": 44136, "conversions": 44137, "stlblues": 44138, "sholm": 44139, "grassy": 44140, "prado": 44141, "onu": 44142, "assaulting": 44143, ">_": 44144, "settes": 44145, "disgraceful": 44146, "aphra": 44147, "âļ½ï¸ıâļ½ï¸ı": 44148, "प": 44149, "kiln": 44150, "goaltender": 44151, "sru": 44152, "philanthropist": 44153, "bals": 
44154, "thn": 44155, "studen": 44156, "sandoval": 44157, "dogrescue": 44158, "elions": 44159, "assessed": 44160, "largo": 44161, "hectares": 44162, "shrm": 44163, "saif": 44164, "cleavage": 44165, "noches": 44166, "nene": 44167, "fatalities": 44168, "curing": 44169, "cleanser": 44170, "ales": 44171, "pvp": 44172, "southbank": 44173, "pizzeria": 44174, "marshals": 44175, "knife": 44176, "andover": 44177, "tblightning": 44178, "srsly": 44179, "oute": 44180, "digimon": 44181, "timesofindia": 44182, "promethe": 44183, "lebo": 44184, "fsu": 44185, "witz": 44186, "revere": 44187, "manas": 44188, "mamba": 44189, "chica": 44190, "guan": 44191, "exhibitor": 44192, "csrracing": 44193, "dere": 44194, "xxxxx": 44195, "gusta": 44196, "storytime": 44197, "stoney": 44198, "organics": 44199, "andu": 44200, "seam": 44201, "minogue": 44202, "anushkasharma": 44203, "aba": 44204, "ðŁİĻï¸ı": 44205, "ugandan": 44206, "chromatic": 44207, "assn": 44208, "documentaries": 44209, "sht": 44210, "rupaul": 44211, "loyd": 44212, "kats": 44213, "eus": 44214, "itech": 44215, "medusa": 44216, "panty": 44217, "kellogg": 44218, "etto": 44219, "tallade": 44220, "shaa": 44221, "dost": 44222, "pms": 44223, "mariana": 44224, "jester": 44225, "crooks": 44226, "ðŁĶ¬": 44227, "mindanao": 44228, "indhoven": 44229, "ðŁ¤ª": 44230, "lexi": 44231, "tvn": 44232, "janis": 44233, "cote": 44234, "ãģĨ": 44235, "serrano": 44236, "iwm": 44237, "ðŁIJ¬": 44238, "kke": 44239, "distributors": 44240, "capu": 44241, "counterfeit": 44242, "campsite": 44243, "aggie": 44244, "ðŁĺ¼": 44245, "chhattisgarh": 44246, "~@": 44247, "stateu": 44248, "sandi": 44249, "preventable": 44250, "cls": 44251, "canne": 44252, "mmc": 44253, "iver": 44254, "saharan": 44255, "palis": 44256, "nightout": 44257, "dos": 44258, "apia": 44259, "abscbn": 44260, "managerial": 44261, "arose": 44262, "mowx": 44263, "arosa": 44264, "ðŁĮ³": 44265, "underdog": 44266, "remover": 44267, "astronomers": 44268, "lentils": 44269, "suscep": 44270, "smoother": 44271, 
"pendleton": 44272, "faucet": 44273, "emory": 44274, "dalmati": 44275, "afcb": 44276, "ticus": 44277, "exempt": 44278, "enrol": 44279, "dheim": 44280, "ðŁIJº": 44281, "restriction": 44282, "starfish": 44283, "stow": 44284, "snorkel": 44285, "thunderbirds": 44286, "shead": 44287, "homosexual": 44288, "dyn": 44289, "asli": 44290, "andretti": 44291, "douche": 44292, "domo": 44293, "tarmac": 44294, "slumber": 44295, "pronto": 44296, "firstdayof": 44297, "miniature": 44298, "mariachi": 44299, "argus": 44300, "recommending": 44301, "mobiles": 44302, "ince": 44303, "illustrious": 44304, "orc": 44305, "adverts": 44306, "grits": 44307, "weasel": 44308, "pagoda": 44309, "overpass": 44310, "greys": 44311, "maximus": 44312, "armagh": 44313, "woodland": 44314, "sunni": 44315, "ðŁēī": 44316, "ëĿ": 44317, "tione": 44318, "socio": 44319, "hos": 44320, "ð٤Ĺð٤Ĺ": 44321, "windsor": 44322, "subsequent": 44323, "munchies": 44324, "idh": 44325, "excluding": 44326, "emi": 44327, "cuth": 44328, "zai": 44329, "weekdays": 44330, "lawsuits": 44331, "barnard": 44332, "ت": 44333, "petting": 44334, "netes": 44335, "mulligan": 44336, "pharmacists": 44337, "raquel": 44338, "eton": 44339, "cranston": 44340, "gilded": 44341, "cleary": 44342, "ceph": 44343, "raa": 44344, "pamper": 44345, "lombardi": 44346, "asin": 44347, "sherry": 44348, "prod": 44349, "forte": 44350, "arianism": 44351, "buffalobills": 44352, "æľ¬": 44353, "ðŁĶ„#": 44354, "uuu": 44355, "justices": 44356, "carina": 44357, "natin": 44358, "maslow": 44359, "drooling": 44360, "cognac": 44361, "camber": 44362, "elong": 44363, "rdr": 44364, "inen": 44365, "convictions": 44366, "amuse": 44367, "trock": 44368, "harmless": 44369, "visitation": 44370, "genomic": 44371, "bland": 44372, "benoit": 44373, "chimp": 44374, "tuscaloosa": 44375, "greasy": 44376, "xpo": 44377, "gilt": 44378, "seq": 44379, "permitted": 44380, "christmaseve": 44381, "books": 44382, "mue": 44383, "oldschool": 44384, "humanright": 44385, "beati": 44386, "ðŁĶĿ": 44387, 
"shat": 44388, "sculpting": 44389, "hwan": 44390, "fernandes": 44391, "sciutto": 44392, "fuentes": 44393, "endeavors": 44394, "maidstone": 44395, "unparalleled": 44396, "shouted": 44397, "queenof": 44398, "merc": 44399, "bandic": 44400, "veda": 44401, "selangor": 44402, "pile": 44403, "jahan": 44404, "intimidating": 44405, "disappears": 44406, "clich": 44407, "zaha": 44408, "wurst": 44409, "hiv": 44410, "fodils": 44411, "cordless": 44412, "aaaaaa": 44413, "hydra": 44414, "belinda": 44415, "eels": 44416, "buf": 44417, "sustaining": 44418, "rugbyleague": 44419, "noc": 44420, "brigitte": 44421, "(ðŁĵ¸:": 44422, "trombone": 44423, "soothe": 44424, "smog": 44425, "adp": 44426, "stable": 44427, "ingley": 44428, "diagnose": 44429, "msg": 44430, "wess": 44431, "ticketing": 44432, "onee": 44433, "nswpol": 44434, "eup": 44435, "autopsy": 44436, "adityanath": 44437, "sundown": 44438, "riverfront": 44439, "siya": 44440, "pis": 44441, "hierarchy": 44442, "durango": 44443, "dijk": 44444, "renshaw": 44445, "heaps": 44446, "epidemi": 44447, "davidbowie": 44448, "internetof": 44449, "ddi": 44450, "nationality": 44451, "mbar": 44452, "airy": 44453, "winder": 44454, "walia": 44455, "elliott": 44456, "cx": 44457, "bavarian": 44458, "platt": 44459, "antw": 44460, "wiwx": 44461, "softer": 44462, "neha": 44463, "heller": 44464, "thand": 44465, "daniela": 44466, "boast": 44467, "degradation": 44468, "ðŁē¦ðŁē¦": 44469, "transforming": 44470, "mane": 44471, "avut": 44472, "ðŁĺĪðŁĺĪ": 44473, "voter": 44474, "thee": 44475, "tate": 44476, "puff": 44477, "indoor": 44478, "soproud": 44479, "boyce": 44480, "borisjohnson": 44481, "waitin": 44482, "immunology": 44483, "ðŁıĨðŁıĨðŁıĨ": 44484, "âĿĮ": 44485, "streetfood": 44486, "lizasober": 44487, "cavalier": 44488, "celia": 44489, "needle": 44490, "motoring": 44491, "gato": 44492, ",)": 44493, "rade": 44494, "harvest": 44495, "tms": 44496, "jarpad": 44497, "oney": 44498, "airmen": 44499, "vre": 44500, "impairment": 44501, "abhishek": 44502, "snoop": 
44503, "lant": 44504, "famously": 44505, "blou": 44506, "sze": 44507, "gander": 44508, "untouch": 44509, "tuf": 44510, "deejay": 44511, "collateral": 44512, "bind": 44513, "ðŁļ©": 44514, "pinning": 44515, "icn": 44516, "';": 44517, "theeconomist": 44518, "ultram": 44519, "worldwaterday": 44520, "tipoff": 44521, "thei": 44522, "feeders": 44523, "campaign": 44524, "scumb": 44525, "dayweekend": 44526, "yom": 44527, "pedic": 44528, "hough": 44529, "psv": 44530, "plin": 44531, "onde": 44532, "bostonmarathon": 44533, "azzy": 44534, "*_*": 44535, "conley": 44536, "thiago": 44537, "hooo": 44538, "galerie": 44539, "lucid": 44540, "jett": 44541, "glitz": 44542, "finalfantasy": 44543, "achievers": 44544, "yung": 44545, "peregrine": 44546, "ophi": 44547, "dames": 44548, "biomar": 44549, "âĺĢï¸ıâĺĢï¸ı": 44550, "skc": 44551, "lics": 44552, "flank": 44553, "arrahman": 44554, "hoof": 44555, "upholstery": 44556, "tats": 44557, "woz": 44558, "¿": 44559, "snoring": 44560, "raer": 44561, "lju": 44562, "apd": 44563, "plating": 44564, "kanu": 44565, "imation": 44566, "fragrances": 44567, "mra": 44568, "moray": 44569, "mott": 44570, "immuni": 44571, "hearties": 44572, "bhopal": 44573, "timers": 44574, "gata": 44575, "colorway": 44576, "carnation": 44577, "winget": 44578, "sighs": 44579, "sville": 44580, "optimist": 44581, "chateau": 44582, "olympians": 44583, "cio": 44584, "singersongwriter": 44585, "nyo": 44586, "fibers": 44587, "burch": 44588, "agro": 44589, "milne": 44590, "igbo": 44591, "cramer": 44592, "ationals": 44593, "danube": 44594, "padma": 44595, "normani": 44596, "enforced": 44597, "breck": 44598, "boehner": 44599, "arden": 44600, "surrendered": 44601, "prosthetic": 44602, "oma": 44603, "hailed": 44604, "calculations": 44605, "wfa": 44606, "bib": 44607, "fcblive": 44608, "fonda": 44609, "westcoast": 44610, "quests": 44611, "friendly": 44612, "towie": 44613, "fitch": 44614, "balot": 44615, "stardom": 44616, "scratching": 44617, "hosa": 44618, "thika": 44619, "oven": 44620, 
"stroke": 44621, "outpost": 44622, "pharmaceuticals": 44623, "hikari": 44624, "muy": 44625, "afd": 44626, "fallontonight": 44627, "squat": 44628, "oru": 44629, "drained": 44630, "chocolat": 44631, "민": 44632, "worths": 44633, "rib": 44634, "muj": 44635, "thats": 44636, "residente": 44637, "itel": 44638, "boost": 44639, "migos": 44640, "mulled": 44641, "laa": 44642, "etsyshop": 44643, "donkeys": 44644, "mek": 44645, "ptc": 44646, "flinders": 44647, "ehs": 44648, "rohit": 44649, "muir": 44650, "gad": 44651, "compositions": 44652, "ÄĨĻ": 44653, "combustion": 44654, "ikh": 44655, "yemeni": 44656, "waved": 44657, "garci": 44658, "akos": 44659, "oods": 44660, "fusion": 44661, "seque": 44662, "slan": 44663, "plur": 44664, "kicchasu": 44665, "shenando": 44666, "sams": 44667, "worlden": 44668, "horowitz": 44669, "withme": 44670, "microbes": 44671, "kki": 44672, "ðŁēĶðŁēĶ": 44673, "wsu": 44674, "patchwork": 44675, "freer": 44676, "yaki": 44677, "theart": 44678, "symbolism": 44679, "miler": 44680, "btn": 44681, "mabu": 44682, "sidekick": 44683, "motivates": 44684, "sagitt": 44685, "naturals": 44686, "serviced": 44687, "psori": 44688, "paola": 44689, "quig": 44690, "ibadan": 44691, "giggs": 44692, "ë³": 44693, "scientology": 44694, "sioux": 44695, "salamat": 44696, "dres": 44697, "cadbury": 44698, "dhawan": 44699, "ción": 44700, "_'": 44701, "swapping": 44702, "mariska": 44703, "jamesbond": 44704, "explosives": 44705, "ayles": 44706, "afer": 44707, "sagu": 44708, "censor": 44709, "toma": 44710, "jefferson": 44711, "ringed": 44712, "partist": 44713, "irresponsible": 44714, "aguilar": 44715, "vacay": 44716, "equitable": 44717, "altrincham": 44718, "acur": 44719, "manish": 44720, "germin": 44721, "schooled": 44722, "putter": 44723, "edad": 44724, "naval": 44725, "toasty": 44726, "solareclipse": 44727, "dishu": 44728, "coyne": 44729, "acco": 44730, "muck": 44731, "maran": 44732, "elos": 44733, "lender": 44734, "croix": 44735, "worthless": 44736, "haber": 44737, "gunmen": 44738, 
"ðŁįĵ": 44739, "zenith": 44740, "tenders": 44741, "hurst": 44742, "holtz": 44743, "italians": 44744, "carlow": 44745, "ucd": 44746, "characteristic": 44747, "bung": 44748, "avl": 44749, "uth": 44750, "sasia": 44751, "rsl": 44752, "redman": 44753, "neighboring": 44754, "greenpeace": 44755, "stips": 44756, "followparty": 44757, "ygk": 44758, "enos": 44759, "omnibus": 44760, "naissance": 44761, "chrissy": 44762, "secure": 44763, "callback": 44764, "jihoon": 44765, "memory": 44766, "blocker": 44767, "lanta": 44768, "daffodils": 44769, "bilt": 44770, "fferty": 44771, "faust": 44772, "iec": 44773, "nipples": 44774, "sog": 44775, "mnd": 44776, "jaguar": 44777, "boldly": 44778, "abpoli": 44779, "proposition": 44780, "gunsense": 44781, "evansville": 44782, "cutters": 44783, "wego": 44784, "doun": 44785, "dox": 44786, "stallions": 44787, "kaj": 44788, "shippers": 44789, "jawa": 44790, "volo": 44791, "leven": 44792, "paprika": 44793, "kovich": 44794, "jordi": 44795, "inductees": 44796, "appalling": 44797, "dialysis": 44798, "alleviate": 44799, "âĢĶâĢĶ": 44800, "pieter": 44801, "midwi": 44802, "qtr": 44803, "juliette": 44804, "intermission": 44805, "hawks": 44806, "actment": 44807, "oneill": 44808, "klin": 44809, "vamps": 44810, "famous": 44811, "could": 44812, "automobi": 44813, "daan": 44814, "westend": 44815, "ellip": 44816, "nhc": 44817, "melanch": 44818, "webseries": 44819, "tongue": 44820, "snatched": 44821, "smyth": 44822, "tangible": 44823, "sli": 44824, "easing": 44825, "barstool": 44826, "overlay": 44827, "affordability": 44828, "tinged": 44829, "teras": 44830, "ayush": 44831, "wannaone": 44832, "rhine": 44833, "dana": 44834, "shana": 44835, "kendal": 44836, "fertile": 44837, "wir": 44838, "repleni": 44839, "larvae": 44840, "isro": 44841, "convos": 44842, "abbrevi": 44843, "ucc": 44844, "hungry": 44845, "burrows": 44846, "ager": 44847, "navi": 44848, "matin": 44849, "duper": 44850, "cern": 44851, "madon": 44852, "ķï¸ı": 44853, "éģ": 44854, "tups": 44855, "hyatt": 
44856, "shep": 44857, "fridaynight": 44858, "wiser": 44859, "heidi": 44860, "hatton": 44861, "pgh": 44862, "fountain": 44863, "wristbands": 44864, "ahmadiyya": 44865, "aerial": 44866, "subscribed": 44867, "solos": 44868, "mace": 44869, "slayed": 44870, "forfe": 44871, "dulce": 44872, "christmass": 44873, "arunjaitley": 44874, "violate": 44875, "obstru": 44876, "nieces": 44877, "wvu": 44878, "idyl": 44879, "faze": 44880, "preserves": 44881, "infringe": 44882, "premiers": 44883, "intervals": 44884, "agency": 44885, "(©": 44886, "standalone": 44887, "dimes": 44888, "boer": 44889, "parameters": 44890, "getit": 44891, "ðŁĺĺðŁĺĺðŁĺĺðŁĺĺ": 44892, "tulane": 44893, "forgiven": 44894, "scoll": 44895, "mbps": 44896, "smashbros": 44897, "robbi": 44898, "primavera": 44899, "alist": 44900, "ghostly": 44901, "ayat": 44902, "yeats": 44903, "impressionist": 44904, "earphones": 44905, "caulfield": 44906, "waikiki": 44907, "salute": 44908, "scou": 44909, "muay": 44910, "louisvuitton": 44911, "bakhta": 44912, "adog": 44913, "inventions": 44914, "hurd": 44915, "foreclo": 44916, "streamline": 44917, "thalaivar": 44918, "chsnews": 44919, "willard": 44920, "tsn": 44921, "europarl": 44922, "crusher": 44923, "mysore": 44924, "grower": 44925, "raping": 44926, "patti": 44927, "gden": 44928, "smw": 44929, "mufti": 44930, "kidman": 44931, "abr": 44932, "sounders": 44933, "skeptical": 44934, "ðŁĶİ": 44935, "sundar": 44936, "ime": 44937, "ferg": 44938, "featherweight": 44939, "arlington": 44940, "pasqu": 44941, "agazine": 44942, "wearable": 44943, "natic": 44944, "mcclure": 44945, "intermitt": 44946, "horde": 44947, "sixties": 44948, "carte": 44949, "bhav": 44950, "zeal": 44951, "experiential": 44952, "adorned": 44953, "sommer": 44954, "enote": 44955, "hypothesis": 44956, "stinky": 44957, "proto": 44958, "deadlines": 44959, "vogel": 44960, "musings": 44961, "moncton": 44962, "guter": 44963, "fle": 44964, "acion": 44965, "voiceof": 44966, "tasha": 44967, "inhabitants": 44968, "typeface": 44969, 
"sba": 44970, "btsx": 44971, "ðŁĶē": 44972, "worx": 44973, "uhc": 44974, "joko": 44975, "cellars": 44976, "goro": 44977, "continuum": 44978, "...&": 44979, "weathercee": 44980, "hap": 44981, "srk": 44982, "risers": 44983, "lonelyplanet": 44984, "unnamed": 44985, "coeur": 44986, "ðŁįĮ": 44987, "theworld": 44988, "ilike": 44989, "fasten": 44990, "amigo": 44991, "riba": 44992, "ramaphosa": 44993, "staffers": 44994, "hadley": 44995, "??\"": 44996, "fiore": 44997, "salut": 44998, "huff": 44999, "bezos": 45000, "Ƒĭ": 45001, "rader": 45002, "kamala": 45003, "inline": 45004, "fillers": 45005, "umatic": 45006, "allin": 45007, "shatter": 45008, "rein": 45009, "oku": 45010, "chases": 45011, "flagged": 45012, "babymetal": 45013, "waterstones": 45014, "tsb": 45015, "cutout": 45016, "ophel": 45017, "aama": 45018, "rockabilly": 45019, "stolic": 45020, "jetblue": 45021, "ichick": 45022, "downton": 45023, "uzbekistan": 45024, "patna": 45025, "laq": 45026, "grange": 45027, ")_/": 45028, "subsidi": 45029, "scp": 45030, "newscast": 45031, "itsa": 45032, "tweetyour": 45033, "emor": 45034, "archaeologists": 45035, "unification": 45036, "porta": 45037, "qx": 45038, "protectors": 45039, "prohib": 45040, "charisma": 45041, "cartag": 45042, "renfre": 45043, "sculpt": 45044, "guwahati": 45045, "dema": 45046, "boop": 45047, "unfpa": 45048, "dexter": 45049, "layla": 45050, "alleges": 45051, "soups": 45052, "neveragain": 45053, "lys": 45054, "calc": 45055, "baroness": 45056, "visualize": 45057, "gerber": 45058, "absorbed": 45059, "iers": 45060, "ahan": 45061, "fontein": 45062, "detectors": 45063, "verstappen": 45064, "svc": 45065, "formulated": 45066, "acdc": 45067, "lix": 45068, "incompetent": 45069, "bhk": 45070, "lourdes": 45071, "waterhouse": 45072, "snowed": 45073, "appreciative": 45074, "sigma": 45075, "lizasoberano": 45076, "penned": 45077, "paycheck": 45078, "tallinn": 45079, "fancafe": 45080, "parisi": 45081, "avalley": 45082, "vig": 45083, "rufc": 45084, "hardship": 45085, "socute": 
45086, "poise": 45087, "ì¹": 45088, "rothschild": 45089, "kly": 45090, "????????": 45091, "lhp": 45092, "ilay": 45093, "fhs": 45094, "amad": 45095, "ideals": 45096, "bradbury": 45097, "balboa": 45098, "nicot": 45099, "kidnap": 45100, "wolve": 45101, "tasmanian": 45102, "opt": 45103, "matthias": 45104, "ãĄ³ãĤ": 45105, "supermarkets": 45106, "mylittlepony": 45107, "melee": 45108, "lister": 45109, "groun": 45110, "fedora": 45111, "kindness": 45112, "enen": 45113, "brahms": 45114, "¯\\_(": 45115, "roswell": 45116, "marlene": 45117, "icu": 45118, "reformation": 45119, "orail": 45120, "hebrides": 45121, "disparities": 45122, "terracotta": 45123, "swallows": 45124, "reid": 45125, "influencing": 45126, "fluor": 45127, "dene": 45128, "tumour": 45129, "blondes": 45130, "thunderbird": 45131, "sheva": 45132, "mogadishu": 45133, "kab": 45134, "creeps": 45135, "iving": 45136, "eneed": 45137, "annoy": 45138, "âĶĢ": 45139, "intrigue": 45140, "enquiry": 45141, "araj": 45142, "tural": 45143, "kubernetes": 45144, "endlessly": 45145, "dividends": 45146, "tora": 45147, "tish": 45148, "commemorates": 45149, "unra": 45150, "trib": 45151, "ponty": 45152, "nem": 45153, "dissent": 45154, "brewingco": 45155, "ðŁĺ½": 45156, "normali": 45157, "biof": 45158, "(...": 45159, "chillen": 45160, "주": 45161, "mellon": 45162, "avis": 45163, "mccormack": 45164, "ingra": 45165, "enriched": 45166, "customerexperience": 45167, "testosterone": 45168, "snug": 45169, "setti": 45170, "geronimo": 45171, "inquirer": 45172, "breaches": 45173, "verything": 45174, "blooming": 45175, "mura": 45176, "dispos": 45177, "bide": 45178, "deva": 45179, "shadesof": 45180, "intrin": 45181, "shev": 45182, "sven": 45183, "nayanthara": 45184, "ganesha": 45185, "cws": 45186, "berta": 45187, "labelled": 45188, "useum": 45189, "nicknamed": 45190, "mahan": 45191, "caruso": 45192, "apur": 45193, "ðŁijĨ": 45194, "wq": 45195, "orphanage": 45196, "discarded": 45197, "magnu": 45198, "lue": 45199, "jeon": 45200, "bridgeport": 45201, 
"pacing": 45202, "mercury": 45203, "(ðŁĵ¸": 45204, "marxist": 45205, "amphibious": 45206, "transplantation": 45207, "stitching": 45208, "thenburg": 45209, "gradual": 45210, "ãĤĮ": 45211, "roft": 45212, "mails": 45213, "inec": 45214, "guyana": 45215, "doppelg": 45216, "vero": 45217, "rewrite": 45218, "headless": 45219, "harbaugh": 45220, "gateway": 45221, "carsforsale": 45222, "swi": 45223, "stis": 45224, "macht": 45225, "unde": 45226, "surabaya": 45227, "stapleton": 45228, "nurturing": 45229, "milner": 45230, "yao": 45231, "lmaoooo": 45232, "kosh": 45233, "arsenal": 45234, "kame": 45235, "erry": 45236, "arroyo": 45237, "dismisses": 45238, "rubbed": 45239, "rcb": 45240, "lewd": 45241, "dilu": 45242, "andor": 45243, "vide": 45244, "urin": 45245, "intersec": 45246, "haar": 45247, "alb": 45248, "yearswith": 45249, "appleton": 45250, "éal": 45251, "ullivan": 45252, "succu": 45253, "monterrey": 45254, "dmx": 45255, "artemis": 45256, "ronnie": 45257, "farmland": 45258, "sfootball": 45259, "grotto": 45260, "anthi": 45261, "ãĢģ": 45262, "à®Ł": 45263, "vidya": 45264, "jimmyfallon": 45265, "àµį": 45266, "tzer": 45267, "gravitational": 45268, "wthr": 45269, "uhhh": 45270, "ehr": 45271, "tinker": 45272, "tijuana": 45273, "scranton": 45274, "ramcharan": 45275, "barclay": 45276, "revan": 45277, "msi": 45278, "kap": 45279, "wrs": 45280, "wethenorth": 45281, "toral": 45282, "satu": 45283, "grom": 45284, "facep": 45285, "erickson": 45286, "zyn": 45287, "sedge": 45288, "oodle": 45289, "spursofficial": 45290, "dsp": 45291, "sicilian": 45292, "solihull": 45293, "receivers": 45294, "ladakh": 45295, "hendrick": 45296, "theri": 45297, "presiding": 45298, "mcguinness": 45299, "litters": 45300, "gunnar": 45301, "ghoul": 45302, "wib": 45303, "ntv": 45304, "karo": 45305, "frock": 45306, "blau": 45307, "amplify": 45308, "allis": 45309, "ullah": 45310, "memoirs": 45311, "khloe": 45312, "interceptions": 45313, "petday": 45314, "looney": 45315, "confin": 45316, "chay": 45317, "piyushgoyal": 
45318, "frequencies": 45319, "utz": 45320, "eventual": 45321, "warmly": 45322, "oblivion": 45323, "anka": 45324, "tait": 45325, "âĿ¤ï¸ı.": 45326, "directorial": 45327, "rulers": 45328, "princes": 45329, "muck": 45330, "sturridge": 45331, "deuce": 45332, "abridged": 45333, "baguette": 45334, "uncles": 45335, "pendu": 45336, "minding": 45337, "forrester": 45338, "avila": 45339, "waller": 45340, "wallstreet": 45341, "mentor": 45342, "hino": 45343, "highway": 45344, "cromwell": 45345, "fanartfriday": 45346, "mbi": 45347, "coyle": 45348, "ahi": 45349, "trove": 45350, "spiegel": 45351, "paytm": 45352, "mcintosh": 45353, "jansen": 45354, "niti": 45355, "nashville": 45356, "leno": 45357, "leicestershire": 45358, "legos": 45359, "dict": 45360, "ðŁĵ½": 45361, "spad": 45362, "beverlyhills": 45363, "syrah": 45364, "separates": 45365, "zain": 45366, "unfit": 45367, "drags": 45368, "tania": 45369, "overflowing": 45370, "hrithik": 45371, "hawthorn": 45372, "zani": 45373, "macfar": 45374, "fide": 45375, "totem": 45376, "peds": 45377, "fundamentally": 45378, "calico": 45379, "sinner": 45380, "jä": 45381, "hilde": 45382, "dsd": 45383, "tenay": 45384, "tahit": 45385, "milf": 45386, "lieb": 45387, "informing": 45388, "uplift": 45389, "rael": 45390, "mortgages": 45391, "lect": 45392, "iiii": 45393, "guillaume": 45394, "composites": 45395, "oldsmobile": 45396, "lend": 45397, "garth": 45398, "commish": 45399, "baptized": 45400, "scorpions": 45401, "rucker": 45402, "bringbackour": 45403, "alliance": 45404, "thalapathy": 45405, "tali": 45406, "spans": 45407, "eridge": 45408, "witherspoon": 45409, "linda": 45410, "skylar": 45411, "korn": 45412, "homs": 45413, "Äį": 45414, "silenced": 45415, "caffe": 45416, "arty": 45417, "distinguish": 45418, "towed": 45419, "pung": 45420, "jessica": 45421, "earnest": 45422, "beaufort": 45423, "tama": 45424, "studyabroad": 45425, "sikhs": 45426, "newbie": 45427, "navratri": 45428, "marble": 45429, "lounging": 45430, "litter": 45431, "dalit": 45432, "sosa": 
45433, "izes": 45434, "grade": 45435, "compromising": 45436, "triton": 45437, "detta": 45438, "vj": 45439, "chauffe": 45440, "spectral": 45441, "powered": 45442, "montessori": 45443, "articulate": 45444, "halton": 45445, "alco": 45446, "yey": 45447, "mntwins": 45448, "acounty": 45449, "ðŁijıðŁı¾": 45450, "âīĪ": 45451, "madmen": 45452, "kala": 45453, "grum": 45454, "chik": 45455, "atis": 45456, "sume": 45457, "akhtar": 45458, "jobsearch": 45459, "highlighter": 45460, "boath": 45461, "âĦ¹": 45462, "tarzan": 45463, "lambo": 45464, "âĽĦï¸ı": 45465, "oxfam": 45466, "dumpster": 45467, "pretzels": 45468, "macos": 45469, "inclined": 45470, "factual": 45471, "advertisers": 45472, "shui": 45473, "puree": 45474, "mlpfi": 45475, "antidote": 45476, "capo": 45477, "pastr": 45478, "mercado": 45479, "button": 45480, "armin": 45481, "agg": 45482, "lolla": 45483, "horribly": 45484, "errands": 45485, "christophe": 45486, "timesnow": 45487, "mondaymotiv": 45488, "liss": 45489, "scandals": 45490, "mci": 45491, "disproportion": 45492, "âĺİ": 45493, "surpass": 45494, "samaritan": 45495, "sotho": 45496, "purest": 45497, "flatt": 45498, "triviatuesday": 45499, "delectable": 45500, "leopold": 45501, "hermione": 45502, "choudhary": 45503, "enrich": 45504, "””": 45505, "subsidiary": 45506, "inequalities": 45507, "bachelor": 45508, "autoimmune": 45509, "lakota": 45510, "ihop": 45511, "adjec": 45512, "thesimpsons": 45513, "shes": 45514, "sek": 45515, "gretchen": 45516, "upstream": 45517, "hinakhan": 45518, "copernic": 45519, "xtina": 45520, "lug": 45521, "toughness": 45522, "ead": 45523, "clipped": 45524, "bius": 45525, "slv": 45526, "fahren": 45527, "deepak": 45528, "cau": 45529, "xan": 45530, "immature": 45531, "digni": 45532, "bobs": 45533, "shredding": 45534, "buttery": 45535, "accommodations": 45536, "deven": 45537, "chunks": 45538, "superleague": 45539, "skybet": 45540, "kildare": 45541, "jeet": 45542, "ëį": 45543, "cek": 45544, "wrecks": 45545, "propane": 45546, "ohl": 45547, "tbd": 
45548, "quoi": 45549, "trumpp": 45550, "mimo": 45551, "reluctant": 45552, "verne": 45553, "oic": 45554, "magh": 45555, "arnau": 45556, "sever": 45557, "lidge": 45558, "stairway": 45559, "kicchasudeep": 45560, "ðŁĶº": 45561, "machining": 45562, "aamaadmi": 45563, "oti": 45564, "cda": 45565, "alit": 45566, "pany": 45567, "installs": 45568, "acct": 45569, "eshop": 45570, "diem": 45571, "hardwell": 45572, "fulfillment": 45573, "scafe": 45574, "quack": 45575, "extracts": 45576, "sweetened": 45577, "fighton": 45578, "fdi": 45579, "dinger": 45580, "waltham": 45581, "usur": 45582, "referees": 45583, "seokjin": 45584, "grann": 45585, "afrin": 45586, "thn": 45587, "schaf": 45588, "parcels": 45589, "betis": 45590, "amarine": 45591, "noman": 45592, "khtar": 45593, "moritz": 45594, "coupling": 45595, "barons": 45596, "ðŁIJ¸": 45597, "ø": 45598, "slp": 45599, "sadler": 45600, "xander": 45601, "triad": 45602, "mcmillan": 45603, "khz": 45604, "dividing": 45605, "ìĹijìĨĮ": 45606, "daryl": 45607, "zedd": 45608, "leys": 45609, "plaques": 45610, "fluori": 45611, "tipperary": 45612, "onnell": 45613, "didier": 45614, "langford": 45615, "imc": 45616, "thesun": 45617, "birdies": 45618, "archa": 45619, "yessss": 45620, "tdi": 45621, "daria": 45622, "candace": 45623, "altam": 45624, "palaces": 45625, "chit": 45626, "santam": 45627, "eventful": 45628, "bookof": 45629, "adb": 45630, "monstax": 45631, "creole": 45632, "coel": 45633, "âĸ½": 45634, "wearen": 45635, "stennis": 45636, "sheath": 45637, "atism": 45638, "groningen": 45639, "mlpfim": 45640, "lepre": 45641, "wrongly": 45642, "rspca": 45643, "rendezvous": 45644, "acknowledging": 45645, "pelvic": 45646, "solicitor": 45647, "slays": 45648, "nuestra": 45649, "lod": 45650, "islander": 45651, "feroci": 45652, "fashionshow": 45653, "rass": 45654, "dgeon": 45655, "adolescents": 45656, "smashes": 45657, "negligence": 45658, "grateful": 45659, "vedere": 45660, "swoop": 45661, "ingl": 45662, "apolice": 45663, "vandalism": 45664, "gann": 45665, 
"joao": 45666, "disupdates": 45667, "zimbabwe": 45668, "underage": 45669, "radiance": 45670, "wof": 45671, "bourgeo": 45672, "plas": 45673, "crani": 45674, "ghue": 45675, "wreckem": 45676, "warrants": 45677, "reform": 45678, "jimmie": 45679, "atwood": 45680, "ysl": 45681, "neilhimself": 45682, "lbj": 45683, "iman": 45684, "tanto": 45685, "noisse": 45686, "verbs": 45687, "equipo": 45688, "altogether": 45689, "mament": 45690, "lice": 45691, "douglass": 45692, "tierney": 45693, "primed": 45694, "jhal": 45695, "furnitu": 45696, "brazili": 45697, "vill": 45698, "pastels": 45699, "nison": 45700, "uff": 45701, "paralysis": 45702, "jaye": 45703, "impo": 45704, "ðŁijģ": 45705, "strategically": 45706, "pakistanis": 45707, "wassup": 45708, "superbike": 45709, "thanku": 45710, "truelove": 45711, "shaikh": 45712, "israelis": 45713, "vip": 45714, "tog": 45715, "lien": 45716, "laker": 45717, "greyhounds": 45718, "culars": 45719, "bianchi": 45720, "balotelli": 45721, "arran": 45722, "loos": 45723, "strates": 45724, "hebron": 45725, "arvo": 45726, "sunderland": 45727, "theal": 45728, "tombstone": 45729, "sandman": 45730, "cpac": 45731, "thanksgiving": 45732, "lovehim": 45733, "latino": 45734, "anin": 45735, "akaif": 45736, "ĭãĤ": 45737, "torquay": 45738, "diest": 45739, "allianz": 45740, "ðŁĺķ": 45741, "golfclub": 45742, "cllr": 45743, "walcott": 45744, "schnau": 45745, "prompted": 45746, "nominating": 45747, "lennox": 45748, "valet": 45749, "monro": 45750, "mayward": 45751, "eph": 45752, "ðŁĶĶ": 45753, "interoper": 45754, "rda": 45755, "reflex": 45756, "armchair": 45757, "ê°ķ": 45758, "stripper": 45759, "porti": 45760, "pharm": 45761, "hamza": 45762, "nireland": 45763, "neue": 45764, "hpv": 45765, "portfoli": 45766, "sunburn": 45767, "frisbee": 45768, "beal": 45769, "baptiste": 45770, "xh": 45771, "tym": 45772, "prati": 45773, "overs": 45774, "hazrat": 45775, "desert": 45776, "derry": 45777, "usky": 45778, "emmett": 45779, "acharya": 45780, ")_/¯": 45781, "shud": 45782, "maya": 
45783, "hamill": 45784, "raim": 45785, "nrc": 45786, "fittings": 45787, "curvy": 45788, "ðŁıĩ": 45789, "sterling": 45790, "à„Ģ": 45791, "walkin": 45792, "shortcuts": 45793, "milly": 45794, "astur": 45795, "alphabe": 45796, "pli": 45797, "pez": 45798, "missyou": 45799, "radford": 45800, "mlg": 45801, "taeyang": 45802, "notjustlakes": 45803, "dumps": 45804, "serendip": 45805, "leur": 45806, "raving": 45807, "ester": 45808, "depriv": 45809, "abscbn": 45810, "ðŁijĩðŁı»": 45811, "scarcity": 45812, "ocr": 45813, "meanings": 45814, "capt": 45815, "dahl": 45816, "fermentation": 45817, "brioche": 45818, "towin": 45819, "outlander": 45820, "massimo": 45821, "encro": 45822, "ðŁ„³": 45823, "built": 45824, "potam": 45825, "kiri": 45826, "tmw": 45827, "monitored": 45828, "kites": 45829, "peoplesvote": 45830, "grayson": 45831, "íģ¬": 45832, "afrika": 45833, "adies": 45834, "ivote": 45835, "gyne": 45836, "gannon": 45837, "dix": 45838, "cmc": 45839, "oural": 45840, "foxandfriends": 45841, "beli": 45842, "igne": 45843, "glan": 45844, "katrinakaif": 45845, "copolitics": 45846, "qualitative": 45847, "psi": 45848, "lucci": 45849, "discoura": 45850, "âĺ®": 45851, "kelli": 45852, "gautam": 45853, "caracas": 45854, "realest": 45855, "pula": 45856, "inus": 45857, "hilltop": 45858, "makeaw": 45859, "attenborough": 45860, "twy": 45861, "rarity": 45862, "peckham": 45863, "mahon": 45864, "cornelius": 45865, "clinicians": 45866, "tonline": 45867, "tbi": 45868, "paradise": 45869, "kasi": 45870, "inevit": 45871, "freshness": 45872, "collingwood": 45873, "lunatic": 45874, "defense": 45875, "copd": 45876, "infra": 45877, "wainwright": 45878, "sainsbury": 45879, "alabam": 45880, "tema": 45881, "laco": 45882, "checker": 45883, "relegated": 45884, "trent": 45885, "stalks": 45886, "huffpost": 45887, "bhubaneswar": 45888, "astral": 45889, "shareyour": 45890, "primrose": 45891, "hime": 45892, "catan": 45893, "endment": 45894, "endow": 45895, "clemens": 45896, "maloney": 45897, "hilary": 45898, 
"gametime": 45899, "denise": 45900, "collaborators": 45901, "bwo": 45902, "radicals": 45903, "guetta": 45904, "icion": 45905, "aua": 45906, "snapmatic": 45907, "satchel": 45908, "excavation": 45909, "baseman": 45910, "são": 45911, "gnation": 45912, "feld": 45913, "survey": 45914, "shahzad": 45915, "mast": 45916, "anirudhofficial": 45917, "trucker": 45918, "otago": 45919, "geograph": 45920, "ethel": 45921, "âļ”ï¸ıâļ”ï¸ı": 45922, "sver": 45923, "mutt": 45924, "internetofthings": 45925, "anchored": 45926, "whouse": 45927, "bangla": 45928, "balmain": 45929, "ç¹ĭãģ": 45930, "breakfa": 45931, "ÔĢ": 45932, "twister": 45933, "tetris": 45934, "cav": 45935, "stags": 45936, "gz": 45937, "aub": 45938, "stormed": 45939, "helens": 45940, "yarmouth": 45941, "stasy": 45942, "gustavo": 45943, "cosc": 45944, "vinson": 45945, "upp": 45946, "scricket": 45947, "assumptions": 45948, "appe": 45949, "nuh": 45950, "uer": 45951, "premise": 45952, "naga": 45953, "eamon": 45954, "coronary": 45955, "naf": 45956, "northside": 45957, "elmer": 45958, "rotar": 45959, "outlining": 45960, "elf": 45961, "resurg": 45962, "katelyn": 45963, "incan": 45964, "hysteria": 45965, "cee": 45966, "ambani": 45967, "prolly": 45968, "ĮãĤĬãģ": 45969, "axes": 45970, "sanjose": 45971, "rembrandt": 45972, "magpie": 45973, "evenly": 45974, "scorsese": 45975, "quaint": 45976, "fg": 45977, "bbuk": 45978, "indianfootball": 45979, "weareall": 45980, "spdwy": 45981, "pisces": 45982, "ecg": 45983, "âĺħâĺħâĺħâĺħâĺħ": 45984, "preorders": 45985, ":|": 45986, "nipple": 45987, "salazar": 45988, "jume": 45989, "jailbreak": 45990, "minn": 45991, "bassett": 45992, "zetta": 45993, "jeffree": 45994, "adjun": 45995, "ticon": 45996, "sandiego": 45997, "drinklocal": 45998, "cholera": 45999, "solicitors": 46000, "obo": 46001, "compost": 46002, "nian": 46003, "wra": 46004, "treach": 46005, "icic": 46006, "professional": 46007, "delve": 46008, "legate": 46009, "historia": 46010, "croissant": 46011, "connoisse": 46012, "namo": 46013, 
"palliative": 46014, "chemtrails": 46015, "iority": 46016, "globalwarming": 46017, "comicart": 46018, "behavioural": 46019, "rested": 46020, "lias": 46021, "climates": 46022, "ŁãģĦ": 46023, "rutland": 46024, "nourish": 46025, "menopause": 46026, "hotties": 46027, "dementi": 46028, "vespa": 46029, "melville": 46030, "analogue": 46031, "tzman": 46032, "strung": 46033, "imperfect": 46034, "glare": 46035, "circling": 46036, "rosberg": 46037, "reco": 46038, "ocity": 46039, "loire": 46040, "embe": 46041, "dossier": 46042, "neel": 46043, "nando": 46044, "mea": 46045, "galvani": 46046, "finesse": 46047, "agp": 46048, "berkeley": 46049, "asim": 46050, "âĺºâĺº": 46051, "quilted": 46052, "ishere": 46053, "unmatched": 46054, "potion": 46055, "forz": 46056, "atre": 46057, "selfies": 46058, "juliana": 46059, "ðŁļ¶": 46060, "âĸº": 46061, "melton": 46062, "âłĢâłĢâłĢâłĢâłĢâłĢâłĢâłĢ": 46063, "spinrilla": 46064, "purcell": 46065, "edp": 46066, "atleti": 46067, "tonyawards": 46068, "raja": 46069, "progno": 46070, "molten": 46071, "stuff": 46072, "pally": 46073, "nobelprize": 46074, "âĻ»ï¸ı": 46075, "spiritual": 46076, "speake": 46077, "sasha": 46078, "brium": 46079, "truss": 46080, "criticize": 46081, "assassinscreed": 46082, "yoruba": 46083, "ulo": 46084, "fireman": 46085, "workinprogress": 46086, "efcc": 46087, "flares": 46088, "robot": 46089, "hikers": 46090, "cll": 46091, "shadowing": 46092, "patsy": 46093, "lehman": 46094, "cns": 46095, "ı": 46096, "guadal": 46097, "à±į": 46098, "rape": 46099, "rhonda": 46100, "parallels": 46101, "sonja": 46102, "language": 46103, "landings": 46104, "zola": 46105, "cramps": 46106, "burning": 46107, "appraisal": 46108, "jolla": 46109, "hamm": 46110, "kasa": 46111, "gully": 46112, "fgo": 46113, "ulysses": 46114, "ribe": 46115, "ðŁēĦ": 46116, "ibu": 46117, "etienne": 46118, "briar": 46119, "finely": 46120, "combating": 46121, "yql": 46122, "gotham": 46123, "wechat": 46124, "topaz": 46125, "primaries": 46126, "lse": 46127, "izz": 46128, "hele": 
46129, "disponible": 46130, "cystic": 46131, "belichick": 46132, "thrush": 46133, "kansascity": 46134, "geom": 46135, "solidi": 46136, "redbubble": 46137, "bystand": 46138, "cambridgeshire": 46139, "parfait": 46140, "astle": 46141, "owo": 46142, "indore": 46143, "stomping": 46144, "smelly": 46145, "ð٤ĸ": 46146, "locomo": 46147, "admitting": 46148, "holme": 46149, "clockwise": 46150, "minsk": 46151, "mcco": 46152, "forget": 46153, "evp": 46154, "camra": 46155, "abella": 46156, "yotes": 46157, "universityof": 46158, "méxico": 46159, "silverado": 46160, "ricket": 46161, "crombie": 46162, "puj": 46163, "eradicate": 46164, "delight": 46165, "ygo": 46166, "glamping": 46167, "vica": 46168, "duggan": 46169, "counters": 46170, "cfd": 46171, "scour": 46172, "reactjs": 46173, "puram": 46174, "parasites": 46175, "inki": 46176, "villen": 46177, "stella": 46178, "limbo": 46179, "angas": 46180, "kcr": 46181, "ðŁēļðŁēļðŁēļ": 46182, "vapori": 46183, "mumford": 46184, "oligar": 46185, "à¼": 46186, "aloo": 46187, "booties": 46188, "adr": 46189, "kelli": 46190, "drummers": 46191, "avici": 46192, "natureuk": 46193, "ronal": 46194, "intrac": 46195, "unsplash": 46196, "leche": 46197, "goma": 46198, "eline": 46199, "enviro": 46200, "bionic": 46201, "bueno": 46202, "mik": 46203, "avin": 46204, "starling": 46205, "empowers": 46206, "cakeday": 46207, "boycot": 46208, "ðŁēļðŁēļ": 46209, "ðŁĮ¸ðŁĮ¸": 46210, "vach": 46211, "mci": 46212, "fractures": 46213, "geri": 46214, "sking": 46215, "excluded": 46216, "luce": 46217, "jave": 46218, "iggy": 46219, "eviden": 46220, "akistan": 46221, "awn": 46222, "morals": 46223, "lucifer": 46224, "haban": 46225, "tumbling": 46226, "sundaymotivation": 46227, "mosley": 46228, "captainamerica": 46229, "schicago": 46230, "theone": 46231, "motd": 46232, "dts": 46233, "ðŁIJ¼": 46234, "repell": 46235, "iii": 46236, "locust": 46237, "geospatial": 46238, "mersey": 46239, "immerse": 46240, "descend": 46241, "bernade": 46242, "js": 46243, "boatsales": 46244, "winder": 
46245, "crank": 46246, "singleton": 46247, "candidacy": 46248, "bena": 46249, "ðŁı»âĢį": 46250, "highlander": 46251, "olt": 46252, "kprs": 46253, "healthylifestyle": 46254, "fourteen": 46255, "endthe": 46256, "ithaca": 46257, "circulated": 46258, "rans": 46259, "prevalent": 46260, "havas": 46261, "splendor": 46262, "rooster": 46263, "kalamazoo": 46264, "jewellers": 46265, "ennedy": 46266, "rousey": 46267, "esy": 46268, "cannons": 46269, "ornamental": 46270, "////": 46271, "rendon": 46272, "winne": 46273, "molding": 46274, "eidmubarak": 46275, "countess": 46276, "simona": 46277, "hawa": 46278, "foes": 46279, "duster": 46280, "sbu": 46281, "portray": 46282, "marries": 46283, "goodday": 46284, "choco": 46285, "achiever": 46286, "ðŁĺ¹ðŁĺ¹": 46287, "preneur": 46288, "tramp": 46289, "tomi": 46290, "nbat": 46291, "gardenchat": 46292, "farrakhan": 46293, "everglades": 46294, "abru": 46295, "sousa": 46296, "sece": 46297, "homeswee": 46298, "terrestrial": 46299, "barit": 46300, "sridevi": 46301, "olu": 46302, "melinda": 46303, "frick": 46304, "candies": 46305, "ðŁĺŃðŁēķ": 46306, "qureshi": 46307, "familyfun": 46308, "exorcist": 46309, "cardinal": 46310, "nyt": 46311, "diesel": 46312, "cumulus": 46313, "capricorn": 46314, "siology": 46315, "lorna": 46316, "dougie": 46317, "andie": 46318, "supersport": 46319, "cfl": 46320, "пÑĢи": 46321, "sayang": 46322, "peek": 46323, "à¸Ĭ": 46324, "lobe": 46325, "jem": 46326, "inglis": 46327, "ggled": 46328, "csn": 46329, "amnesty": 46330, "chups": 46331, "baes": 46332, "sauer": 46333, "ðŁıIJ": 46334, "mongolian": 46335, "enet": 46336, "backstreet": 46337, "drilled": 46338, "accessing": 46339, "ceo": 46340, "bse": 46341, "aiken": 46342, "purr": 46343, "worsen": 46344, "wheres": 46345, "wark": 46346, "testifying": 46347, "buri": 46348, "blast": 46349, "awg": 46350, "ðŁĵĭ": 46351, "redefining": 46352, "hearing": 46353, "uci": 46354, "cmp": 46355, "boni": 46356, "tailoring": 46357, "taji": 46358, "nocchi": 46359, "emt": 46360, "stephenking": 
46361, "neet": 46362, "complains": 46363, "campaigner": 46364, "luciano": 46365, "twilight": 46366, "tiesto": 46367, "passports": 46368, "floyd": 46369, "cathedr": 46370, "naked": 46371, "caregiver": 46372, "bcoz": 46373, "adecides": 46374, "kuri": 46375, "lyk": 46376, "braries": 46377, "drenched": 46378, "disclose": 46379, "ðŁēªðŁı½": 46380, "leblanc": 46381, "jetty": 46382, "garty": 46383, "chipmun": 46384, "bsu": 46385, "rhythmic": 46386, "icz": 46387, "frid": 46388, "annex": 46389, "amex": 46390, "soloist": 46391, "lancers": 46392, "arrowhead": 46393, "specification": 46394, "simulated": 46395, "nais": 46396, "inverte": 46397, "bowing": 46398, "worship": 46399, "fz": 46400, "aboss": 46401, "shaq": 46402, "ì¶ķ": 46403, "challengers": 46404, "anarch": 46405, "aamaadmiparty": 46406, "ãħĭãħĭãħĭ": 46407, "suffolk": 46408, "socorro": 46409, "snell": 46410, "cladding": 46411, "absorbing": 46412, "shawa": 46413, "participates": 46414, "ðŁįĶ": 46415, "bookstores": 46416, "baku": 46417, "seaport": 46418, "kojima": 46419, "gaby": 46420, "packard": 46421, "electrician": 46422, "letit": 46423, "mowing": 46424, "fawad": 46425, "youngjae": 46426, "hotmail": 46427, "mening": 46428, "urie": 46429, "intimacy": 46430, "conti": 46431, ":\")": 46432, "lifeisgood": 46433, "inciner": 46434, "idri": 46435, "craziness": 46436, "journos": 46437, "franchi": 46438, "bottlen": 46439, "alda": 46440, "ffes": 46441, "kx": 46442, "southwe": 46443, "aira": 46444, "clayton": 46445, "scoti": 46446, "fj": 46447, "briga": 46448, "ð٤ĺðŁı»": 46449, "demonstrators": 46450, "yz": 46451, "stork": 46452, "naq": 46453, "cascades": 46454, "travelchat": 46455, "plata": 46456, "padma": 46457, "franci": 46458, "attain": 46459, "batgirl": 46460, "lombard": 46461, "hoos": 46462, "ddos": 46463, "neonatal": 46464, "disclaimer": 46465, "rss": 46466, "rant": 46467, "disen": 46468, "texaste": 46469, "socal": 46470, "fractal": 46471, "camry": 46472, "strife": 46473, "snacking": 46474, "muh": 46475, "santander": 
46476, "morons": 46477, "graf": 46478, "parades": 46479, "huston": 46480, "drupal": 46481, "miento": 46482, "kirstel": 46483, "hyde": 46484, "vomit": 46485, "fortified": 46486, "sphinx": 46487, "dav": 46488, "biryani": 46489, "winnings": 46490, "sbaseball": 46491, "merged": 46492, "lovelondon": 46493, "lingering": 46494, "dreambig": 46495, "carleton": 46496, "livelihood": 46497, "django": 46498, "astrid": 46499, "grids": 46500, "downe": 46501, "bruised": 46502, "sne": 46503, "scarecrow": 46504, "helium": 46505, "fnc": 46506, "biggs": 46507, "anter": 46508, "restorative": 46509, "empires": 46510, "abdel": 46511, "lifestyle": 46512, "kiwanis": 46513, "colloquium": 46514, "meen": 46515, "prick": 46516, "antique": 46517, "zeb": 46518, "mimic": 46519, "edmonds": 46520, "ðŁijĬðŁijĬ": 46521, "qing": 46522, "ppel": 46523, "mcgill": 46524, "interpreting": 46525, "âŀķ": 46526, "rashad": 46527, "doka": 46528, "narrator": 46529, "electromagnetic": 46530, "ashby": 46531, "saura": 46532, "irandeal": 46533, "âģīï¸ı": 46534, "krishnan": 46535, "indi": 46536, "ffen": 46537, "brea": 46538, "osman": 46539, "multinational": 46540, "chippe": 46541, "recruiters": 46542, "ausbiz": 46543, "pounding": 46544, "regen": 46545, "cursor": 46546, "refusal": 46547, "macs": 46548, "inak": 46549, "axial": 46550, "waifu": 46551, "upcycled": 46552, "hindustan": 46553, "cassini": 46554, "carlyle": 46555, "scratches": 46556, "reef": 46557, "manatee": 46558, "eatery": 46559, "ðŁĵ¢": 46560, "uncondition": 46561, "senpai": 46562, "onther": 46563, "comicbook": 46564, "prosciutto": 46565, "demar": 46566, "mise": 46567, "mage": 46568, "freec": 46569, "ayesha": 46570, "alder": 46571, "androidgames": 46572, "leyton": 46573, "hock": 46574, "doorway": 46575, "chicagofire": 46576, "aaliyah": 46577, "swelling": 46578, "bix": 46579, ".ðŁĺĤ": 46580, "evankirstel": 46581, "torpedo": 46582, "konstant": 46583, "genevieve": 46584, "maia": 46585, "hauser": 46586, "dotorg": 46587, "hideous": 46588, "fik": 46589, "spraw": 
46590, "eek": 46591, "zappa": 46592, "wandered": 46593, "''": 46594, "rajan": 46595, "bambi": 46596, "($)": 46597, "widening": 46598, "toolbox": 46599, "sair": 46600, "illuminating": 46601, "prays": 46602, "outpatient": 46603, "iw": 46604, "dayo": 46605, "lob": 46606, "swfl": 46607, "shades": 46608, "gums": 46609, "cookin": 46610, "kodi": 46611, "griffin": 46612, "traumati": 46613, "stea": 46614, "slaughtered": 46615, "godbless": 46616, "airtime": 46617, "pseudo": 46618, "bsa": 46619, "hauled": 46620, "arif": 46621, "à¸Ńà¸ĩ": 46622, "lel": 46623, "wcpo": 46624, "militi": 46625, "charters": 46626, "worlda": 46627, "ruk": 46628, "kgs": 46629, "digitalindia": 46630, "isable": 46631, "idyllic": 46632, "espino": 46633, "marietta": 46634, "ebo": 46635, "teamcanada": 46636, "abour": 46637, "wilton": 46638, "rockstars": 46639, "favored": 46640, "physic": 46641, "wrinkle": 46642, "tbr": 46643, "dprint": 46644, "ballarat": 46645, "adal": 46646, "zey": 46647, "ðŁĺįðŁĶ„": 46648, "tomlin": 46649, "mtr": 46650, "palsy": 46651, "fenerbah": 46652, "tighten": 46653, "philia": 46654, "ironing": 46655, "ryu": 46656, "bant": 46657, "enquire": 46658, "cair": 46659, "aburger": 46660, "trun": 46661, "greenberg": 46662, "chauhan": 46663, "irina": 46664, "shani": 46665, "trendsetter": 46666, "prett": 46667, "zafar": 46668, "alove": 46669, "vici": 46670, "panic": 46671, "noo": 46672, "lustre": 46673, "disrupted": 46674, "ballis": 46675, "sonsof": 46676, "monsi": 46677, "instac": 46678, "akest": 46679, "ëĭ¤": 46680, "kwame": 46681, "horrormovies": 46682, "district": 46683, "saucy": 46684, "mban": 46685, "armies": 46686, "withdrawn": 46687, "medics": 46688, "loftus": 46689, "eroom": 46690, "bekind": 46691, "arns": 46692, "allon": 46693, "unison": 46694, "davids": 46695, "crat": 46696, "nicotine": 46697, "soor": 46698, "smx": 46699, "onco": 46700, "cosplaying": 46701, "zombies": 46702, "harms": 46703, "eger": 46704, "rosy": 46705, "moonshine": 46706, "fein": 46707, "cett": 46708, "dubrov": 
46709, "regents": 46710, "benitez": 46711, "ðŁijıðŁı¼ðŁijıðŁı¼": 46712, "stec": 46713, "malia": 46714, "prioritize": 46715, "iceland": 46716, "ftse": 46717, "vamo": 46718, "lamont": 46719, "homosexuality": 46720, "brees": 46721, "regui": 46722, "cbp": 46723, "tej": 46724, "skysports": 46725, "detergent": 46726, "shasta": 46727, "derel": 46728, "conservancy": 46729, "colorized": 46730, "accolades": 46731, "viso": 46732, "showyour": 46733, "nanow": 46734, "biceps": 46735, "usability": 46736, "bim": 46737, "dailysketch": 46738, "pearljam": 46739, "strangest": 46740, "megadeth": 46741, "broadcasts": 46742, "barren": 46743, "arton": 46744, "chriss": 46745, "configu": 46746, "lures": 46747, "isthe": 46748, "eul": 46749, "railwayana": 46750, "globalhealth": 46751, "gianni": 46752, "uaap": 46753, "slum": 46754, "consciously": 46755, "abre": 46756, "nup": 46757, "budget": 46758, "vada": 46759, "esch": 46760, "realness": 46761, "erased": 46762, "thunt": 46763, "bez": 46764, "armistice": 46765, "ðŁij¹": 46766, "shrun": 46767, "oled": 46768, "driverless": 46769, "ðŁ¤·ðŁı»âĢįâĻĢï¸ı": 46770, "wondr": 46771, "skan": 46772, "salaam": 46773, "motherland": 46774, "hwang": 46775, "geno": 46776, "gangnam": 46777, "twright": 46778, "endorsing": 46779, "enic": 46780, "adoration": 46781, "paused": 46782, "patricks": 46783, "docked": 46784, "platte": 46785, "ffxv": 46786, "ethnicity": 46787, "autoshow": 46788, "sideshow": 46789, "afterlife": 46790, "relocated": 46791, "orphaned": 46792, "foodnetwork": 46793, "dareto": 46794, "andra": 46795, "slaps": 46796, "vlive": 46797, "swims": 46798, "reimagined": 46799, "mistle": 46800, "revise": 46801, "reality": 46802, "bharti": 46803, "ðŁēĻðŁēĽ": 46804, "latest": 46805, "proudest": 46806, "grasses": 46807, "lanyard": 46808, "freshest": 46809, "carcinoma": 46810, "anomaly": 46811, "ziegler": 46812, "sumner": 46813, "lyrix": 46814, "gorg": 46815, "isd": 46816, "avel": 46817, "swildlife": 46818, "mesqu": 46819, "johncena": 46820, "euroleague": 46821, 
"saber": 46822, "masterful": 46823, "yarra": 46824, "cognition": 46825, "jacobson": 46826, "abolic": 46827, "sirloin": 46828, "shukla": 46829, "mojito": 46830, "supere": 46831, "stweet": 46832, "mez": 46833, "esa": 46834, "rudolf": 46835, "gura": 46836, "whereyou": 46837, "ttm": 46838, "wins": 46839, "trustworthy": 46840, "nyk": 46841, "braden": 46842, "tabletop": 46843, "goodfood": 46844, "eson": 46845, "bek": 46846, "linguistic": 46847, "grays": 46848, "chath": 46849, "hcs": 46850, "moni": 46851, "deans": 46852, "cussions": 46853, "chell": 46854, "slows": 46855, "hemi": 46856, "dapp": 46857, "sharpie": 46858, "boosters": 46859, "aos": 46860, "strack": 46861, "sedona": 46862, "mueller": 46863, "hardwick": 46864, "ornate": 46865, "thora": 46866, "salud": 46867, "otwol": 46868, "chum": 46869, "miho": 46870, "forage": 46871, "thelittle": 46872, "tearful": 46873, "oneself": 46874, "mindy": 46875, "smg": 46876, "gmbh": 46877, "emerald": 46878, "ðŁĶ“âļªï¸ı": 46879, "tutti": 46880, "receptions": 46881, "revising": 46882, "ibrox": 46883, "topeka": 46884, "salami": 46885, "expanse": 46886, "ibooks": 46887, "dobson": 46888, "clio": 46889, "ats": 46890, "ðŁļĮ": 46891, "moha": 46892, "isance": 46893, "shutters": 46894, "moot": 46895, "janine": 46896, "marvelcomics": 46897, "jordani": 46898, "poser": 46899, "kenneth": 46900, "hyung": 46901, "deja": 46902, "aseball": 46903, "speciality": 46904, "euston": 46905, "classiccar": 46906, "hadith": 46907, "ðŁIJī": 46908, "chasing": 46909, "izo": 46910, "grosven": 46911, "aglia": 46912, "thisdayinhistory": 46913, "trow": 46914, "omile": 46915, "huar": 46916, "byn": 46917, "saline": 46918, "divine": 46919, "demonic": 46920, "tyran": 46921, "handover": 46922, "revitalization": 46923, "paella": 46924, "cryptic": 46925, "sedg": 46926, "mend": 46927, "dunkirk": 46928, "bred": 46929, "wald": 46930, "sportscar": 46931, "aard": 46932, "wheaton": 46933, "daener": 46934, "klan": 46935, "brt": 46936, "bakhtawar": 46937, "spires": 46938, 
"schubert": 46939, "roti": 46940, "polish": 46941, "ose": 46942, "agame": 46943, "wondercon": 46944, "protestant": 46945, "bosa": 46946, "ðŁĺŁ": 46947, "dü": 46948, "joyride": 46949, "gertrude": 46950, "âĿĿ": 46951, "gila": 46952, "vh": 46953, "twa": 46954, "trav": 46955, "swallowed": 46956, "starve": 46957, "lain": 46958, "entren": 46959, "reiki": 46960, "sukh": 46961, "craic": 46962, "azu": 46963, "webpage": 46964, "keefe": 46965, "hypothe": 46966, "hirsch": 46967, "helle": 46968, "campground": 46969, "wamy": 46970, "travi": 46971, "shahi": 46972, "sandeep": 46973, "rui": 46974, "hanuman": 46975, "dwp": 46976, "repository": 46977, "noor": 46978, "noff": 46979, "unreal": 46980, "pell": 46981, "blackhistory": 46982, "harvick": 46983, "mascar": 46984, "payee": 46985, "pasha": 46986, "gastronomy": 46987, "dÃŃ": 46988, "aig": 46989, "rosenthal": 46990, "openday": 46991, "embellished": 46992, "ttip": 46993, "sunbathing": 46994, "gopack": 46995, "endome": 46996, "ï¸ı#": 46997, "invalid": 46998, "finalfour": 46999, "stfu": 47000, "squishy": 47001, "rasta": 47002, "mosch": 47003, "jamesc": 47004, "dietrich": 47005, "sela": 47006, "melb": 47007, "elvi": 47008, "tdp": 47009, "suni": 47010, "slit": 47011, "jha": 47012, "biza": 47013, "spiked": 47014, "lli": 47015, "lillard": 47016, "vampi": 47017, "synopsis": 47018, "azhar": 47019, "kendricklamar": 47020, "ĮãĤĬãģŁãģĦ": 47021, "heartless": 47022, "countryfile": 47023, "airplay": 47024, "arrogance": 47025, "pree": 47026, "virtuoso": 47027, "ãħłãħłãħłãħł": 47028, "raju": 47029, "lebu": 47030, "forward": 47031, "tug": 47032, "dros": 47033, "mondaymotivaton": 47034, "concepcion": 47035, "thelo": 47036, "padi": 47037, "looool": 47038, "ÑĢоГ": 47039, "itss": 47040, "ethical": 47041, "enduro": 47042, "__:": 47043, "expenditure": 47044, "monste": 47045, "masking": 47046, "terriers": 47047, "ibis": 47048, "ember": 47049, "cumple": 47050, "punctuation": 47051, "piper": 47052, "irvin": 47053, "adee": 47054, "yyyyyy": 47055, 
"flashbacks": 47056, "celsius": 47057, "donnie": 47058, "bogota": 47059, "benevol": 47060, "thescript": 47061, "shilpa": 47062, "prose": 47063, "findia": 47064, "zeke": 47065, "neko": 47066, "doves": 47067, "blueslyrix": 47068, "frosh": 47069, "soweto": 47070, "mplo": 47071, "alai": 47072, "sabi": 47073, "raqqa": 47074, "wftv": 47075, "stroller": 47076, "iansomerhalder": 47077, "ðŁĶª": 47078, "anon": 47079, "moseley": 47080, "!?!?": 47081, "staking": 47082, "moly": 47083, "cartri": 47084, "csg": 47085, "astor": 47086, "transcend": 47087, "maer": 47088, "deux": 47089, "cowgirl": 47090, "sask": 47091, "punter": 47092, "maken": 47093, "oates": 47094, "lovett": 47095, "growler": 47096, "sagin": 47097, "vn": 47098, "ssible": 47099, "officeofrg": 47100, "ymc": 47101, "sabar": 47102, "faulty": 47103, "apha": 47104, "akon": 47105, "ðŁij«": 47106, "snowdon": 47107, "aew": 47108, "raisethe": 47109, "ðĿĵ": 47110, "gruesome": 47111, "clementine": 47112, "sping": 47113, "lata": 47114, "worldenviron": 47115, "mimic": 47116, "canaria": 47117, "bakhtawarbz": 47118, "aoa": 47119, "fala": 47120, "ãĤŃ": 47121, "aviva": 47122, "youuuu": 47123, "thigh": 47124, "ladders": 47125, "gumbo": 47126, "tzky": 47127, "fuzz": 47128, "plasticpollution": 47129, "estate": 47130, "strengthened": 47131, "kant": 47132, "drin": 47133, "calvert": 47134, "transformational": 47135, "frightened": 47136, "maclean": 47137, "elitedangerous": 47138, "earthy": 47139, "tson": 47140, "toda": 47141, "jnu": 47142, "..,": 47143, "michal": 47144, "iban": 47145, "jeong": 47146, "isreal": 47147, "simcoe": 47148, "exclusives": 47149, "bluebells": 47150, "bene": 47151, "teu": 47152, "pilsner": 47153, "penske": 47154, "atheists": 47155, "mpu": 47156, "cartagena": 47157, "ðŁēĹðŁēĹ": 47158, "millionaires": 47159, "kkkk": 47160, "itar": 47161, "subscriptions": 47162, "remote": 47163, "mafi": 47164, "hinton": 47165, "wcc": 47166, "hok": 47167, "dsb": 47168, "ableton": 47169, "seventy": 47170, "punks": 47171, "eindhoven": 
47172, "shone": 47173, "mcfarlane": 47174, "limpopo": 47175, "emphasi": 47176, "ü": 47177, "sinfo": 47178, "petre": 47179, "mangrove": 47180, "chino": 47181, "bertie": 47182, "playlists": 47183, "pushawards": 47184, "paf": 47185, "debbie": 47186, "cdo": 47187, "rino": 47188, "ðŁı¾âĢįâĻĤï¸ı": 47189, "folke": 47190, "bonnar": 47191, "thine": 47192, "slan": 47193, "halter": 47194, "evie": 47195, "awsome": 47196, "vultures": 47197, "sparky": 47198, "seizures": 47199, "âľĶ": 47200, "ramone": 47201, "ineffe": 47202, "aln": 47203, "proctor": 47204, "astra": 47205, "thevoice": 47206, "grote": 47207, "scion": 47208, "deadline": 47209, "amaya": 47210, "tainted": 47211, "patterned": 47212, "exceeding": 47213, "crossfit": 47214, "kaylee": 47215, "dropbox": 47216, "rushes": 47217, "tackled": 47218, "moby": 47219, "retrogamer": 47220, "ncbd": 47221, "benefitting": 47222, "shaykh": 47223, "guildhall": 47224, "gentry": 47225, "dreamcast": 47226, "dreaded": 47227, "bundled": 47228, "thaw": 47229, "revolving": 47230, "npt": 47231, "kyliejenner": 47232, "imaginative": 47233, "roni": 47234, "overcame": 47235, "familytime": 47236, "dsburg": 47237, "carnaval": 47238, "relationship": 47239, "recognizable": 47240, "coroner": 47241, "hole": 47242, "fanfic": 47243, "emirates": 47244, "burritos": 47245, "analyse": 47246, "thinner": 47247, "nees": 47248, "gallipoli": 47249, "blr": 47250, "catwoman": 47251, "-->>": 47252, "ault": 47253, "adaily": 47254, "naughty": 47255, "ilio": 47256, "solitaire": 47257, "mtvbr": 47258, "jocelyn": 47259, "arunach": 47260, "repent": 47261, "southgate": 47262, "hyacin": 47263, "essential": 47264, "fenton": 47265, "andum": 47266, "itor": 47267, "gopal": 47268, "slinger": 47269, "posei": 47270, "awil": 47271, "wielding": 47272, "raila": 47273, "elias": 47274, "asto": 47275, "ä": 47276, "tendency": 47277, "strata": 47278, "kert": 47279, "<-": 47280, "imacele": 47281, "daes": 47282, "stimulus": 47283, "hanley": 47284, "fitnes": 47285, "ecstasy": 47286, "limous": 
47287, "hailing": 47288, "ð٤Ń": 47289, "chiswick": 47290, "taries": 47291, "slav": 47292, "puli": 47293, "modernization": 47294, "blackmail": 47295, "bingham": 47296, "hfx": 47297, "++": 47298, "ðŁĩ®ðŁĩ³": 47299, "niv": 47300, "wea": 47301, "professor": 47302, "koff": 47303, "bolster": 47304, "suave": 47305, "sequences": 47306, "pepperoni": 47307, "notte": 47308, "dren": 47309, "ãģ¨ç¹ĭãģ": 47310, "hsv": 47311, "oga": 47312, "aptly": 47313, "zad": 47314, "excelsi": 47315, "rinka": 47316, "moldova": 47317, "minn": 47318, "mabel": 47319, "conferencing": 47320, "basing": 47321, "ofer": 47322, "obsi": 47323, "hamillhimself": 47324, "careless": 47325, "briefed": 47326, "inherent": 47327, "parish": 47328, "dubnation": 47329, "townsville": 47330, "sarawak": 47331, "geeky": 47332, "doncasterisgreat": 47333, "wasabi": 47334, "gup": 47335, "pheno": 47336, "drainthe": 47337, "carrieunderwood": 47338, "bleeds": 47339, "bbcworld": 47340, "anew": 47341, "altaf": 47342, "dulwich": 47343, "aniston": 47344, "wti": 47345, "sumatra": 47346, "grafton": 47347, "bln": 47348, "mester": 47349, "bodega": 47350, "rego": 47351, "esq": 47352, "anjo": 47353, "sumptuous": 47354, "maisie": 47355, "�": 47356, "wilt": 47357, "jakob": 47358, "elvis": 47359, "sepul": 47360, "muster": 47361, "airpollution": 47362, "presidente": 47363, "happymonday": 47364, "extensively": 47365, "flondon": 47366, "tls": 47367, "playing": 47368, "peed": 47369, "dinho": 47370, "vardy": 47371, "pika": 47372, "niro": 47373, "aucus": 47374, "ðŁį¦": 47375, "null": 47376, "elondon": 47377, "juventus": 47378, "imagines": 47379, "disab": 47380, "lito": 47381, "dura": 47382, "workplaces": 47383, "promote": 47384, "mccaf": 47385, "woodwork": 47386, "wawx": 47387, "ப": 47388, "ttino": 47389, "shari": 47390, "semper": 47391, "bettertogether": 47392, "ðŁijĬðŁı»": 47393, "zebra": 47394, "pondering": 47395, "enchil": 47396, "hom": 47397, "cosmic": 47398, "tanz": 47399, "mocked": 47400, "eccc": 47401, "athed": 47402, "abolish": 47403, 
"propeller": 47404, "parisagreement": 47405, "assemblies": 47406, "industry": 47407, "fraudulent": 47408, "pesa": 47409, "changmin": 47410, "axx": 47411, "ðŁēµ": 47412, "irrational": 47413, "cusa": 47414, "ramadhan": 47415, "octavia": 47416, "onelove": 47417, "jacki": 47418, "barak": 47419, "taxider": 47420, "serious": 47421, "nathanfillion": 47422, "mcen": 47423, "chk": 47424, "popart": 47425, "gravity": 47426, "coppola": 47427, "readingfc": 47428, "illusions": 47429, "jig": 47430, "wwx": 47431, "resh": 47432, "exporting": 47433, "buzzard": 47434, "âϤ": 47435, "pcm": 47436, "lanapar": 47437, "kos": 47438, "aromas": 47439, "antalya": 47440, "wwdc": 47441, "vena": 47442, "phila": 47443, "ballin": 47444, "ðŁijĦ": 47445, "quinta": 47446, "mao": 47447, "fery": 47448, "eighty": 47449, "sentiments": 47450, "safeguarding": 47451, "rwa": 47452, "puffs": 47453, "lucille": 47454, "decath": 47455, "slu": 47456, "nugent": 47457, "deter": 47458, "brazil": 47459, "zeiss": 47460, "superbowl": 47461, "subsidy": 47462, "altern": 47463, "hidalgo": 47464, "enzymes": 47465, "ä½": 47466, "tagne": 47467, "hairdresser": 47468, "adrien": 47469, "walkout": 47470, "opposes": 47471, "cantina": 47472, "bedside": 47473, "afan": 47474, "ðŁĶĹ": 47475, "prophetic": 47476, "danes": 47477, "unsuccessful": 47478, "supercharged": 47479, "pkk": 47480, "exemption": 47481, "hartle": 47482, "secular": 47483, "clipping": 47484, "brs": 47485, "unitedway": 47486, "cnet": 47487, "patchy": 47488, "hagan": 47489, "een": 47490, "âļľ": 47491, "vara": 47492, "sympathi": 47493, "nevertrump": 47494, "affirmation": 47495, "omf": 47496, "nycfc": 47497, "maja": 47498, "surro": 47499, "keerth": 47500, "upscale": 47501, "sandalwood": 47502, "monarchy": 47503, "knobs": 47504, "Ƅĭ": 47505, "potholes": 47506, "hungergames": 47507, "terraces": 47508, "nasir": 47509, "counsell": 47510, "welcometo": 47511, "waq": 47512, "seaman": 47513, "mita": 47514, "stunningly": 47515, "ontheroad": 47516, "inability": 47517, ")!!": 47518, 
"bongo": 47519, "antv": 47520, "sput": 47521, "worldenvironmentday": 47522, "resusc": 47523, "ytd": 47524, "fim": 47525, "eunhyuk": 47526, "sachin": 47527, "roseanne": 47528, "clermont": 47529, "apec": 47530, "amina": 47531, "vening": 47532, "nantes": 47533, "almost": 47534, "sinus": 47535, "exas": 47536, "tyl": 47537, "tien": 47538, "plead": 47539, "lancs": 47540, "burnaby": 47541, "rek": 47542, "joom": 47543, "observers": 47544, "discography": 47545, "clg": 47546, "âϦ": 47547, "snack": 47548, "rti": 47549, "oily": 47550, "crystalli": 47551, "brute": 47552, "webdevelopment": 47553, "toppings": 47554, "laf": 47555, "anis": 47556, "adder": 47557, "reliving": 47558, "carlin": 47559, "battleof": 47560, "weg": 47561, "syrian": 47562, "pont": 47563, "ndc": 47564, "laghate": 47565, "yuma": 47566, "spp": 47567, "piti": 47568, "robbing": 47569, "marting": 47570, "reykja": 47571, "rajput": 47572, "ncds": 47573, "kiewicz": 47574, "âĢ¢âĢ¢": 47575, "vampire": 47576, "substantially": 47577, "opioids": 47578, "nepali": 47579, "kline": 47580, "aroo": 47581, "understand": 47582, "litt": 47583, "uit": 47584, "thrombo": 47585, "saries": 47586, "quot": 47587, "balling": 47588, "ttr": 47589, "sgh": 47590, "philipp": 47591, "brant": 47592, "acl": 47593, "mello": 47594, "whittaker": 47595, ".;": 47596, "defiant": 47597, "bgc": 47598, "replying": 47599, "mirren": 47600, "metamorpho": 47601, "schwab": 47602, "bulge": 47603, "utilized": 47604, "pickering": 47605, "pardon": 47606, "dsa": 47607, "Ć ĀøÄŖ": 47608, "dooley": 47609, "cumulative": 47610, "л": 47611, "urgency": 47612, "emir": 47613, "+/-": 47614, "¦Ī": 47615, "otas": 47616, "âı³": 47617, "stationed": 47618, "grapevine": 47619, "arac": 47620, "karanjohar": 47621, "fancy": 47622, "saul": 47623, "coogs": 47624, "lgbtq": 47625, "Ć˜Ā§Ć™Ä§": 47626, "javi": 47627, "ummer": 47628, "pll": 47629, "denis": 47630, "daipur": 47631, "puffin": 47632, "lewisham": 47633, "fandom": 47634, "cope": 47635, "vesmatter": 47636, "sve": 47637, "helpless": 
47638, "deodor": 47639, "ostrich": 47640, "kazan": 47641, "fridaythe": 47642, "condor": 47643, "vx": 47644, "sophomores": 47645, "robles": 47646, "cutt": 47647, "climbers": 47648, "리": 47649, "sleg": 47650, "snf": 47651, "macys": 47652, "hydrating": 47653, "groupe": 47654, "poyn": 47655, "moulin": 47656, "hgtv": 47657, "lmfaooo": 47658, "sulphur": 47659, "asdfghjkl": 47660, "annabelle": 47661, "humpback": 47662, "braved": 47663, "viswasam": 47664, "multipurpose": 47665, "humidi": 47666, "escorted": 47667, "barbican": 47668, "fad": 47669, "corsa": 47670, "ðŁ¤«": 47671, "pippa": 47672, "hereto": 47673, "cany": 47674, "sergi": 47675, "orcas": 47676, "ovie": 47677, "edou": 47678, "sany": 47679, "globalization": 47680, "mancini": 47681, "foodtruck": 47682, "fis": 47683, "defibrill": 47684, "schre": 47685, "smafia": 47686, "lovewins": 47687, "laut": 47688, "kaka": 47689, "hollande": 47690, "gameon": 47691, "resurgence": 47692, "outside": 47693, "olympiad": 47694, "intan": 47695, "abstraction": 47696, "rapid": 47697, "palom": 47698, "calle": 47699, "jasmin": 47700, "attackers": 47701, "swagg": 47702, "mitra": 47703, "kylo": 47704, "ல": 47705, "hermitage": 47706, "gordo": 47707, "eira": 47708, "sosfam": 47709, "rollout": 47710, "excite": 47711, "synod": 47712, "merrill": 47713, "cals": 47714, "assa": 47715, "livelihoods": 47716, "juve": 47717, "theblack": 47718, "gopackgo": 47719, "antlers": 47720, "albanian": 47721, "woolly": 47722, "quiche": 47723, "purification": 47724, "areth": 47725, "smarthome": 47726, "nek": 47727, "allblacks": 47728, "mexicans": 47729, "ism": 47730, "germs": 47731, "complexion": 47732, "marck": 47733, "ushi": 47734, "ðŁIJIJ": 47735, "charl": 47736, "castic": 47737, "tillerson": 47738, "giuliani": 47739, "biodegradable": 47740, "malbec": 47741, "bois": 47742, "jubil": 47743, "imes": 47744, "rame": 47745, "genetic": 47746, "espnu": 47747, "chley": 47748, "soho": 47749, "gopher": 47750, "gsc": 47751, "buuren": 47752, "cube": 47753, "bridesmaids": 
47754, "webinars": 47755, "toe": 47756, "manipur": 47757, "violently": 47758, "noticias": 47759, "exchanging": 47760, "chiev": 47761, "replaceable": 47762, "muaythai": 47763, "buss": 47764, "spil": 47765, "instalment": 47766, "divya": 47767, "caitlin": 47768, "olim": 47769, "filtering": 47770, "whirlwind": 47771, "stared": 47772, "priorit": 47773, "pram": 47774, "pompeii": 47775, "monologue": 47776, "kite": 47777, "buka": 47778, "â̦..": 47779, "vaccine": 47780, "brero": 47781, "wozni": 47782, "solent": 47783, "referr": 47784, "myrt": 47785, "gridiron": 47786, "galatasaray": 47787, "froze": 47788, "claremont": 47789, "ðŁ„Ą": 47790, "victorias": 47791, "sseldorf": 47792, "pastures": 47793, "netneutrality": 47794, "chor": 47795, "ðŁijģ": 47796, "ಿ": 47797, "weho": 47798, "symptom": 47799, "josel": 47800, "inous": 47801, "dragoncon": 47802, "powerball": 47803, "pte": 47804, "fourthofjuly": 47805, "ecla": 47806, "earbuds": 47807, "whereabouts": 47808, "saltlife": 47809, "deprivation": 47810, "chter": 47811, "wiggle": 47812, "system": 47813, "psst": 47814, "chaz": 47815, "dany": 47816, "rimo": 47817, "oaxaca": 47818, "lanaparrilla": 47819, "barcelon": 47820, "melancholy": 47821, "wayback": 47822, "hotro": 47823, "nsi": 47824, "lilly": 47825, "kuro": 47826, "jahan": 47827, "intellect": 47828, "boardgame": 47829, "ðŁıĬ": 47830, "sneakpeek": 47831, "kprc": 47832, "jails": 47833, "candel": 47834, "zanzi": 47835, "mortimer": 47836, "starch": 47837, "rags": 47838, "pfa": 47839, "longlive": 47840, "kart": 47841, "girona": 47842, "crocker": 47843, "christoph": 47844, "precautions": 47845, "warship": 47846, "perm": 47847, "parent": 47848, "vangogh": 47849, "gifford": 47850, "allegheny": 47851, "rayn": 47852, "utm": 47853, "stencil": 47854, "recalling": 47855, "penney": 47856, "zazzle": 47857, "ìĄĿ": 47858, "hinds": 47859, "arenas": 47860, "nuev": 47861, "lawler": 47862, "guin": 47863, "dothis": 47864, "ðŁijķ": 47865, "ì¶ķíķĺ": 47866, "weg": 47867, "tib": 47868, "ridin": 47869, 
"complexes": 47870, "turbulent": 47871, "pesos": 47872, "demarcus": 47873, "vallarta": 47874, "samsun": 47875, "kisses": 47876, "heinrich": 47877, "deportes": 47878, "wilms": 47879, "urd": 47880, "thenext": 47881, "inkigayo": 47882, "howi": 47883, "firsts": 47884, "carriage": 47885, "cleanliness": 47886, "maswar": 47887, "isch": 47888, "axel": 47889, "sizzle": 47890, "roadhouse": 47891, "frans": 47892, "entourage": 47893, "cobble": 47894, "booth": 47895, "benedict": 47896, "talon": 47897, "fcu": 47898, "yearofthe": 47899, "rayon": 47900, "raidernation": 47901, "foyle": 47902, "koval": 47903, "pianos": 47904, "lpg": 47905, "burmese": 47906, "manure": 47907, "geocaching": 47908, "coscino": 47909, "bnp": 47910, "ferra": 47911, "strophy": 47912, "marais": 47913, "cees": 47914, "legendof": 47915, "katniss": 47916, "enoch": 47917, "aved": 47918, "youknow": 47919, "dprk": 47920, "ðŁĺ¢ðŁĺ¢": 47921, "spun": 47922, "prost": 47923, "sorrows": 47924, "centred": 47925, "kea": 47926, "galicia": 47927, "?ð٤Ķ": 47928, "ÑĢоГа": 47929, "bouchard": 47930, "ðŁēĻðŁēľ": 47931, "yui": 47932, "seedlings": 47933, "jonah": 47934, "recovers": 47935, "nyrd": 47936, "boardroom": 47937, "suma": 47938, "myjaps": 47939, "tung": 47940, "shai": 47941, "irgc": 47942, "elio": 47943, "wagons": 47944, "kashi": 47945, "policemen": 47946, "johnnie": 47947, "alecoscino": 47948, "shopify": 47949, "dotted": 47950, "detri": 47951, "vaw": 47952, "tofficial": 47953, "inyour": 47954, "chalmers": 47955, "traced": 47956, "novi": 47957, "byes": 47958, "ariel": 47959, "nippon": 47960, "lapel": 47961, "griez": 47962, "bgs": 47963, "fooling": 47964, "dita": 47965, "vijaysethu": 47966, "nmwx": 47967, "asot": 47968, "kranti": 47969, "helm": 47970, "vedi": 47971, "sickest": 47972, "mochi": 47973, "kabo": 47974, "shrubs": 47975, "hered": 47976, "bsp": 47977, "sqm": 47978, "hamr": 47979, "dulkar": 47980, "antha": 47981, "nrf": 47982, "avoidance": 47983, "aten": 47984, "publix": 47985, "bearers": 47986, "nasi": 47987, 
"hap": 47988, "hells": 47989, "ðŁĸ„": 47990, "Ć ĀøĀ·": 47991, "thelastjedi": 47992, "ohwx": 47993, "ðŁį«": 47994, "wahoo": 47995, "therese": 47996, "recaps": 47997, "ssnhq": 47998, "birdphotography": 47999, "vay": 48000, "petti": 48001, "paulo": 48002, "belvedere": 48003, "(*": 48004, "grl": 48005, "duvet": 48006, "cpec": 48007, "sait": 48008, "porsch": 48009, "measurable": 48010, "aviators": 48011, "fremantle": 48012, "breen": 48013, "onom": 48014, "meand": 48015, "lifesaving": 48016, "euref": 48017, "endon": 48018, "embaras": 48019, "airasia": 48020, "elis": 48021, "dunkin": 48022, "starmagic": 48023, "sill": 48024, "portobello": 48025, "kiefer": 48026, "exe": 48027, "muted": 48028, "ãģ¦": 48029, "wethepeople": 48030, "logia": 48031, "liberal": 48032, "theforceawakens": 48033, "mined": 48034, "haunts": 48035, "freckles": 48036, "caretaker": 48037, "sindia": 48038, "âķIJ": 48039, "devlin": 48040, "liston": 48041, "directioner": 48042, "ohn": 48043, "figaro": 48044, "emmanuel": 48045, "dubois": 48046, "clones": 48047, "bruise": 48048, "ðŁİĪðŁİī": 48049, "disinfe": 48050, "dermatology": 48051, "asr": 48052, "swatch": 48053, "discomfort": 48054, "tamanna": 48055, "piday": 48056, "macken": 48057, "katic": 48058, "delusional": 48059, "shawnee": 48060, "gud": 48061, "albino": 48062, "pali": 48063, "dingh": 48064, "cucumbers": 48065, "coffey": 48066, "anticipating": 48067, "treasured": 48068, "websummit": 48069, "sheltered": 48070, "savor": 48071, "pedagogy": 48072, "mgs": 48073, "shma": 48074, "sbu": 48075, "denali": 48076, "campos": 48077, "bubblegum": 48078, "oir": 48079, "leaps": 48080, "yler": 48081, "rone": 48082, "sanskrit": 48083, "mint": 48084, "meatless": 48085, "futurist": 48086, "dude": 48087, "avel": 48088, "protested": 48089, "squire": 48090, "zaki": 48091, "szn": 48092, "harcourt": 48093, "cyclone": 48094, "bourdain": 48095, "gatherings": 48096, "dant": 48097, "adventurer": 48098, "paragon": 48099, "altman": 48100, "dding": 48101, "banerjee": 48102, 
"snorkeling": 48103, "motherwell": 48104, "missy": 48105, "ender": 48106, "glows": 48107, "kiwis": 48108, "chickpea": 48109, "poro": 48110, "efron": 48111, "appt": 48112, "uy": 48113, "specified": 48114, "gabby": 48115, "estrada": 48116, "combos": 48117, "bourbon": 48118, "vini": 48119, "varun": 48120, "stephani": 48121, "keywords": 48122, "carvings": 48123, "amitabh": 48124, "wrought": 48125, "twal": 48126, "reels": 48127, "clubbing": 48128, "ubiquit": 48129, "crit": 48130, "ambedkar": 48131, "ƦĻ": 48132, "pruning": 48133, "vaccinated": 48134, "boeing": 48135, "sks": 48136, "loona": 48137, "hypnosis": 48138, "edelman": 48139, "phol": 48140, "hew": 48141, "colosse": 48142, "mckinsey": 48143, "uon": 48144, "tote": 48145, "sacrificing": 48146, "oxi": 48147, "nang": 48148, "emu": 48149, "пÑĢиÑĢоГа": 48150, "mth": 48151, "kerswednesday": 48152, "argued": 48153, "timelapse": 48154, "risking": 48155, "regulating": 48156, "nigh": 48157, "likelihood": 48158, "cubic": 48159, "auction": 48160, "reinfor": 48161, "pistor": 48162, "noses": 48163, "yel": 48164, "snuggles": 48165, "pei": 48166, "jeanette": 48167, "taku": 48168, "rith": 48169, "guyz": 48170, "Ć ĀøÅ€": 48171, "yte": 48172, "verted": 48173, "paysoff": 48174, "jauregui": 48175, "hooligans": 48176, "procedural": 48177, "mib": 48178, "hardy": 48179, "eleng": 48180, "checkers": 48181, "alline": 48182, "themet": 48183, "proudof": 48184, "keerthyofficial": 48185, "collaborator": 48186, "niu": 48187, "inflicted": 48188, "advani": 48189, "retwee": 48190, "memoriam": 48191, "ficial": 48192, "tighter": 48193, "salem": 48194, "reviewers": 48195, "brics": 48196, "bendigo": 48197, "amell": 48198, "turkish": 48199, "sushmaswar": 48200, "paulson": 48201, "palawan": 48202, "mollie": 48203, "stitcher": 48204, "sburgh": 48205, "iru": 48206, "haydn": 48207, "eners": 48208, "aroa": 48209, "uzzi": 48210, "sarajevo": 48211, "hela": 48212, "apollo": 48213, "ninety": 48214, "vaca": 48215, "spon": 48216, "ventu": 48217, "jelena": 48218, 
"heifer": 48219, "avoids": 48220, "spine": 48221, "prize": 48222, "marist": 48223, "recreating": 48224, "mede": 48225, "wooden": 48226, "findlay": 48227, "rofl": 48228, "ndi": 48229, "comprehend": 48230, "yugo": 48231, "yü": 48232, "towork": 48233, "ufos": 48234, "sonar": 48235, "piston": 48236, "recording": 48237, "tentative": 48238, "artforsale": 48239, "pellets": 48240, "fredo": 48241, "Ć™ÄŖĆ˜Ā±": 48242, "muses": 48243, "customization": 48244, "profound": 48245, "isner": 48246, "ideally": 48247, "siam": 48248, "plankton": 48249, "cmdr": 48250, "manger": 48251, "franken": 48252, "customizable": 48253, "म": 48254, "walkaway": 48255, "swivel": 48256, "vastly": 48257, "noton": 48258, "lexa": 48259, "exmoor": 48260, "zas": 48261, "tante": 48262, "reductions": 48263, "lolly": 48264, "hipsters": 48265, "benefited": 48266, "ë²": 48267, "wwwww": 48268, "masculine": 48269, "fiji": 48270, "drey": 48271, "phill": 48272, "aneous": 48273, "nicol": 48274, "mendez": 48275, "disappro": 48276, "chner": 48277, "throughs": 48278, "shenmue": 48279, "eastman": 48280, "ðŁIJİ": 48281, "yuck": 48282, "undertale": 48283, "reys": 48284, "gobeavs": 48285, "engen": 48286, "cna": 48287, "merr": 48288, "birk": 48289, "ãģ¨ç¹ĭãģĮãĤĬãģŁãģĦ": 48290, "âĄ£@": 48291, "ynna": 48292, "steed": 48293, "offender": 48294, "atum": 48295, "vanishing": 48296, "presidenti": 48297, "lovethem": 48298, "gnocchi": 48299, "friggin": 48300, "peril": 48301, "madhya": 48302, "agne": 48303, "deejay": 48304, "marnock": 48305, "mtb": 48306, "foldable": 48307, "@___": 48308, "standre": 48309, "bronx": 48310, "bowski": 48311, "finite": 48312, "crockett": 48313, "bsf": 48314, "getit": 48315, "serenawilliams": 48316, "miro": 48317, "ignatius": 48318, "slay": 48319, "rinse": 48320, "fondue": 48321, "seldom": 48322, "smore": 48323, "gani": 48324, "dyce": 48325, "dmitry": 48326, "crumb": 48327, "latepost": 48328, "primark": 48329, "ohana": 48330, "florals": 48331, "doa": 48332, "remembranceday": 48333, "dds": 48334, "azione": 
48335, "toonami": 48336, "airport": 48337, "æĿ±": 48338, "thad": 48339, "fist": 48340, "dinesh": 48341, "drwho": 48342, "adwords": 48343, "admirer": 48344, "proje": 48345, "kyrgyz": 48346, "Ć Ā«": 48347, "manifestation": 48348, "lewan": 48349, "jic": 48350, "thibau": 48351, "leased": 48352, "vanity": 48353, "nourished": 48354, "nevertheless": 48355, "augmente": 48356, "fuelled": 48357, "chead": 48358, "wilshere": 48359, "rudi": 48360, "pz": 48361, "myco": 48362, "morro": 48363, "herbalife": 48364, "hardrock": 48365, "deman": 48366, "dreality": 48367, "spades": 48368, "cevic": 48369, "bhai": 48370, "baron": 48371, "ultimatefan": 48372, "hounews": 48373, "tobi": 48374, "strut": 48375, "keel": 48376, "affiliation": 48377, "themasters": 48378, "smal": 48379, "hue": 48380, "esteban": 48381, "conv": 48382, "omnic": 48383, "databases": 48384, "cov": 48385, "terti": 48386, "stg": 48387, "snoopdogg": 48388, "metabol": 48389, "lethbridge": 48390, "ðŁı»âĢįâĻĢï¸ı": 48391, "yearling": 48392, "residentevil": 48393, "nwsl": 48394, "iyaki": 48395, "griezmann": 48396, "cous": 48397, "ðŁĵĿ:": 48398, "torian": 48399, "sami": 48400, "ðŁĶ„ðŁĶ„ðŁĶ„ðŁĶ„ðŁĶ„": 48401, "gare": 48402, "alliances": 48403, "whitfield": 48404, "wether": 48405, "refining": 48406, "coyi": 48407, "kraken": 48408, "ðŁĺĺâĿ¤": 48409, "singularity": 48410, "lili": 48411, "hns": 48412, "boldand": 48413, "wawrinka": 48414, "misogyny": 48415, "lovers": 48416, "cq": 48417, "bdg": 48418, "adona": 48419, "garter": 48420, "womenof": 48421, "scd": 48422, "recognising": 48423, "muna": 48424, "strou": 48425, "signalling": 48426, "laredo": 48427, "hellboy": 48428, "aleksand": 48429, "unavailable": 48430, "pediatric": 48431, "asin": 48432, "meria": 48433, "rishi": 48434, "futurism": 48435, "wye": 48436, "polarized": 48437, "ewe": 48438, "propel": 48439, "informs": 48440, "crease": 48441, "~\"": 48442, "artiston": 48443, "likefor": 48444, "heidelberg": 48445, "erra": 48446, "lifein": 48447, "lenny": 48448, "interrupt": 48449, 
"coherent": 48450, "caz": 48451, "vickers": 48452, "leveled": 48453, "fbs": 48454, "cabins": 48455, "bummed": 48456, "apostles": 48457, "weh": 48458, "tendon": 48459, "souvenirs": 48460, "infuri": 48461, "pierce": 48462, "asset": 48463, "mlas": 48464, "goth": 48465, "diggin": 48466, "annas": 48467, "ylor": 48468, "thwaite": 48469, "swel": 48470, "panera": 48471, "murderers": 48472, "crooked": 48473, "bsgo": 48474, "acu": 48475, "aon": 48476, "rean": 48477, "oneof": 48478, "kohl": 48479, "bloodh": 48480, "pesticide": 48481, "lostdog": 48482, "flexing": 48483, "ëĤĺ": 48484, "supra": 48485, "eternally": 48486, "ðŁļĻ": 48487, "paolo": 48488, "olan": 48489, "momo": 48490, "iselle": 48491, "captainmarvel": 48492, "slou": 48493, "mistakenly": 48494, "akhilesh": 48495, "mert": 48496, "ilinan": 48497, "buon": 48498, "balkan": 48499, "mirro": 48500, "millen": 48501, "derail": 48502, "damon": 48503, "titi": 48504, "bios": 48505, "redon": 48506, "picard": 48507, "parte": 48508, "ðŁ¤Ł": 48509, "غ": 48510, "sonics": 48511, "firsth": 48512, "ddc": 48513, "vegans": 48514, "turban": 48515, "nigan": 48516, "lottie": 48517, "lyndon": 48518, "starbuck": 48519, "pinkfloyd": 48520, "lifestyles": 48521, "amara": 48522, "ashe": 48523, "rsc": 48524, "vala": 48525, "smer": 48526, "cwgc": 48527, "client": 48528, "buenas": 48529, "jagan": 48530, "coops": 48531, "ðŁijijðŁijij": 48532, "specializes": 48533, "snagged": 48534, "glar": 48535, "bennet": 48536, "wildlifewednesday": 48537, "bowden": 48538, "pik": 48539, "artin": 48540, "emporium": 48541, "arl": 48542, "reba": 48543, "passer": 48544, "disappoints": 48545, "additive": 48546, "âľĬðŁı½": 48547, "bayer": 48548, "missoula": 48549, "haskell": 48550, "commences": 48551, "nix": 48552, "neman": 48553, "exploited": 48554, "plasticsurgery": 48555, "ccd": 48556, "asocial": 48557, "vot": 48558, "siegel": 48559, "froome": 48560, "kapam": 48561, "fara": 48562, "eha": 48563, "probes": 48564, "mwf": 48565, "meeting": 48566, "pbb": 48567, "akins": 
48568, "mistletoe": 48569, "kingdomhearts": 48570, "forkids": 48571, "ecr": 48572, "bale": 48573, "escorts": 48574, "adidasoriginals": 48575, "kwa": 48576, "kts": 48577, "halloffame": 48578, "ðŁĺį.": 48579, "wags": 48580, "potted": 48581, "owing": 48582, "honeycomb": 48583, "hefty": 48584, "urology": 48585, "merle": 48586, "bpd": 48587, "stripping": 48588, "reich": 48589, "kstate": 48590, "guay": 48591, "yonge": 48592, "shakti": 48593, "gloom": 48594, "batt": 48595, "sonom": 48596, "nery": 48597, "elba": 48598, "blanks": 48599, "helle": 48600, "triplets": 48601, "bombay": 48602, "akarta": 48603, "abia": 48604, "transmitted": 48605, "rolf": 48606, "jais": 48607, "angularjs": 48608, "fierc": 48609, "mss": 48610, "trace": 48611, "à„ĩ": 48612, "tombs": 48613, "oldman": 48614, "kombucha": 48615, "fol": 48616, "ehealth": 48617, "cereals": 48618, "arelli": 48619, "inari": 48620, "ðŁē©": 48621, "wol": 48622, "liberties": 48623, "fawn": 48624, "affirm": 48625, "nunavut": 48626, "hysterical": 48627, "kdrama": 48628, "artes": 48629, "âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢": 48630, "valentin": 48631, "manslaughter": 48632, "gales": 48633, "eoin": 48634, "energized": 48635, "dels": 48636, "withdraws": 48637, "stles": 48638, "sarcastic": 48639, "ramesh": 48640, "incredibles": 48641, "lockhart": 48642, "yawn": 48643, "ultimatefanlive": 48644, "oooooooooooooooo": 48645, "muen": 48646, "gurudev": 48647, "teer": 48648, "peeling": 48649, "newsnow": 48650, "linguistics": 48651, "directv": 48652, "agend": 48653, "unilever": 48654, "ruger": 48655, "handedly": 48656, "erose": 48657, "limel": 48658, "thec": 48659, "royalties": 48660, "finishers": 48661, "nrg": 48662, "mgt": 48663, "fidget": 48664, "comps": 48665, "bacon": 48666, "aggressively": 48667, "abit": 48668, "châ": 48669, "tarde": 48670, "slugger": 48671, "qanda": 48672, "greening": 48673, "dats": 48674, "enslaved": 48675, "spector": 48676, "oye": 48677, "freef": 48678, "bhand": 48679, "stopbrexit": 48680, "misconceptions": 48681, "cava": 
48682, "ðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺįðŁĺį": 48683, "multitasking": 48684, "housel": 48685, "ferreira": 48686, "centime": 48687, "ankles": 48688, "jodh": 48689, "helly": 48690, "frome": 48691, "outtuesday": 48692, "narnia": 48693, "balaji": 48694, "lbloggers": 48695, "jyoti": 48696, "ðŁįĩ": 48697, "lancia": 48698, "capri": 48699, "yap": 48700, "natash": 48701, "downfall": 48702, ".\"âĢĶ": 48703, "î": 48704, "ligament": 48705, "coatings": 48706, "aided": 48707, "hiko": 48708, "falling": 48709, "encrypted": 48710, "yegfood": 48711, "infringement": 48712, "cudi": 48713, "cep": 48714, "ðŁĺįðŁĺĤ": 48715, "trad": 48716, "superrugby": 48717, "edwin": 48718, "whiche": 48719, "vimeo": 48720, "layne": 48721, "invigor": 48722, "hehe": 48723, "dubrovnik": 48724, "bieber": 48725, "utr": 48726, "shaman": 48727, "opers": 48728, "hamill": 48729, "enig": 48730, "dif": 48731, "arum": 48732, "scrapbook": 48733, "minh": 48734, "divergence": 48735, "mckinnon": 48736, "lifetime": 48737, "guterres": 48738, "wille": 48739, "pleas": 48740, "patty": 48741, "micron": 48742, "kz": 48743, "domaine": 48744, "rusher": 48745, "mds": 48746, "chesney": 48747, "screwdriver": 48748, "âģ©,": 48749, "sledge": 48750, "hauer": 48751, "chana": 48752, "stamina": 48753, "sprinkler": 48754, "pln": 48755, "heff": 48756, "bolton": 48757, "omon": 48758, "carrington": 48759, "accordion": 48760, "jorge": 48761, "interception": 48762, "inputs": 48763, "gull": 48764, "transcription": 48765, "vanuatu": 48766, "itical": 48767, "ethos": 48768, "tich": 48769, "spacey": 48770, "peeking": 48771, "umi": 48772, "hager": 48773, "psychotic": 48774, "illian": 48775, "illia": 48776, "bonnaroo": 48777, "anese": 48778, "puc": 48779, "laghateparth": 48780, "enhall": 48781, "economical": 48782, "dredge": 48783, "%-": 48784, "uwe": 48785, "tubular": 48786, "scouncil": 48787, "peasants": 48788, "fler": 48789, "tumbler": 48790, "hep": 48791, "fordham": 48792, "rowley": 48793, "initials": 48794, "evasion": 48795, "ernation": 48796, 
"plugins": 48797, "cochran": 48798, "cattle": 48799, "acidity": 48800, "ðŁİĬðŁİī": 48801, "regrann": 48802, "jumpman": 48803, "eface": 48804, "xma": 48805, "patriarchy": 48806, "escobar": 48807, "cristian": 48808, "tipton": 48809, "nueva": 48810, "hackney": 48811, "backseat": 48812, "killarney": 48813, "aidan": 48814, "stadion": 48815, "simultaneous": 48816, "idaho": 48817, "aje": 48818, "uth": 48819, "figure": 48820, "clos": 48821, "burk": 48822, "voluntar": 48823, "recite": 48824, "macfarlane": 48825, "curfew": 48826, "boudo": 48827, "wgn": 48828, "stix": 48829, "slap": 48830, "scratched": 48831, "phillip": 48832, "journe": 48833, "expelled": 48834, "waz": 48835, "uke": 48836, "tatiana": 48837, "oue": 48838, "hopp": 48839, "dimitri": 48840, "ðŁĵ£": 48841, "matologist": 48842, "electrifying": 48843, "bluffs": 48844, "billsmafia": 48845, "azcardinals": 48846, "yaa": 48847, "xmas": 48848, "shara": 48849, "rith": 48850, "gills": 48851, "dres": 48852, "barton": 48853, "authorization": 48854, "imperialism": 48855, "homeof": 48856, "todo": 48857, "footpath": 48858, "bandwidth": 48859, "visitspain": 48860, "mohsin": 48861, "erupted": 48862, "miki": 48863, "insignia": 48864, "mikel": 48865, "ssh": 48866, "gera": 48867, "bankholiday": 48868, "awan": 48869, "tweak": 48870, "starcraft": 48871, "eal": 48872, "construction": 48873, "skeletons": 48874, "leep": 48875, "inem": 48876, "barclay": 48877, "shipwreck": 48878, "monsieur": 48879, "yoh": 48880, "ront": 48881, "formative": 48882, "sero": 48883, "lep": 48884, "horseman": 48885, "hoosier": 48886, "hazmat": 48887, "cylinders": 48888, "centi": 48889, "ðŁē„ðŁē„ðŁē„": 48890, "reem": 48891, "naire": 48892, "musically": 48893, "grasshopper": 48894, "estonian": 48895, "terminology": 48896, "romain": 48897, "bloggerrt": 48898, "toxin": 48899, "stance": 48900, "cultivated": 48901, "anast": 48902, "ðŁIJį": 48903, "shimano": 48904, "gopher": 48905, "enei": 48906, "recyclable": 48907, "gamification": 48908, "fightfor": 48909, "cq": 
48910, "avocados": 48911, "keys": 48912, "elike": 48913, "glycer": 48914, "shakur": 48915, "mobilization": 48916, "galley": 48917, "explain": 48918, "exchanged": 48919, "peth": 48920, "obedience": 48921, "illage": 48922, "ennis": 48923, "ãĄŀ": 48924, "wiv": 48925, "wallabies": 48926, "maar": 48927, "igers": 48928, "fintech": 48929, "finalized": 48930, "woj": 48931, "meaningless": 48932, "infield": 48933, "onnaise": 48934, "eet": 48935, "bronte": 48936, "passages": 48937, "ðŁij§": 48938, "strickland": 48939, "northernlights": 48940, "lomond": 48941, "htc": 48942, "wray": 48943, "shifter": 48944, "dialog": 48945, "ðŁįį": 48946, ">>>>>>": 48947, "teatime": 48948, "stech": 48949, "sichuan": 48950, "quill": 48951, "franca": 48952, "complementary": 48953, "barrington": 48954, "marcus": 48955, "malam": 48956, "goooo": 48957, "forsa": 48958, "electra": 48959, "afs": 48960, "âĹĨ": 48961, "trife": 48962, "snazzy": 48963, "folia": 48964, "andolan": 48965, "afterdark": 48966, "woodson": 48967, "strade": 48968, "littlest": 48969, "ogun": 48970, "conwy": 48971, "cowards": 48972, "ðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤðŁĺĤ": 48973, "íĬ¸": 48974, "seul": 48975, "murphy": 48976, "dunks": 48977, "kapilshar": 48978, "joachim": 48979, "womack": 48980, "equality": 48981, "averages": 48982, "aine": 48983, "ð٦Ī": 48984, "tacular": 48985, "disability": 48986, "uked": 48987, "midcentury": 48988, "barthol": 48989, "teasers": 48990, "tabern": 48991, "njcaa": 48992, "spout": 48993, "opi": 48994, "kubball": 48995, "blom": 48996, "soar": 48997, "populism": 48998, "methyl": 48999, "ðŁijĬðŁı¼": 49000, "ospre": 49001, "aloils": 49002, "ðŁĵĸ": 49003, "ðŁĮļ": 49004, "xer": 49005, "spilling": 49006, "publica": 49007, "cardam": 49008, "adish": 49009, "sacha": 49010, "pkg": 49011, "buda": 49012, "lyricist": 49013, "ibc": 49014, "grump": 49015, "hover": 49016, "halep": 49017, "antibody": 49018, "anemone": 49019, "âĻ„âĻ„âĻ„âĻ„": 49020, "mcl": 49021, "lithograph": 49022, "ccu": 49023, "sfest": 49024, "pathic": 49025, 
"callister": 49026, "ottawa": 49027, "gunsn": 49028, "rutger": 49029, "halibut": 49030, "envision": 49031, "differentiate": 49032, "ðŁļĢðŁļĢ": 49033, "piran": 49034, "latel": 49035, "ucn": 49036, "troubad": 49037, "raine": 49038, "fiercely": 49039, "learnenglish": 49040, "lease": 49041, "wexmondays": 49042, "emit": 49043, "drayton": 49044, "burrell": 49045, "scubadiving": 49046, "holler": 49047, "dru": 49048, "clocked": 49049, "wral": 49050, "apro": 49051, "translucent": 49052, "wbo": 49053, "patriarch": 49054, "moja": 49055, "lannister": 49056, "fishery": 49057, "nederland": 49058, "mildly": 49059, "mirai": 49060, "mako": 49061, "jap": 49062, "ðŁĺ©ðŁĺ©ðŁĺ©": 49063, "prostatec": 49064, "panna": 49065, "arama": 49066, "undertaking": 49067, "tompkins": 49068, "neop": 49069, "solids": 49070, "savoury": 49071, "eames": 49072, "cutlery": 49073, "woodbridge": 49074, "steamer": 49075, "rizzo": 49076, "wildcat": 49077, "ratna": 49078, "laminated": 49079, "kineni": 49080, "jalap": 49081, "aides": 49082, "acknowledges": 49083, "?!?!?!": 49084, "!ðŁİī": 49085, "wafc": 49086, "maggio": 49087, "haves": 49088, "darje": 49089, "ofi": 49090, "gril": 49091, "vasi": 49092, "brux": 49093, "mohd": 49094, "fakespeare": 49095, "arnold": 49096, "rmb": 49097, "forbe": 49098, "walleye": 49099, "rodi": 49100, "therapeutics": 49101, "strategi": 49102, "obste": 49103, "mudder": 49104, "downloadable": 49105, "ddings": 49106, "dca": 49107, "asiangames": 49108, "campeon": 49109, "appropriation": 49110, "thcentury": 49111, "ramatta": 49112, "draped": 49113, "bullion": 49114, "muc": 49115, "onex": 49116, "segreg": 49117, "ophelia": 49118, "bodily": 49119, "âĿ¤ðŁĺį": 49120, "wizar": 49121, "teased": 49122, "ademy": 49123, "toid": 49124, "sura": 49125, "lazarus": 49126, "snickers": 49127, "mase": 49128, "loh": 49129, "bowed": 49130, "biblio": 49131, "xchange": 49132, "harlan": 49133, "ghoshal": 49134, "flavorful": 49135, "bhagat": 49136, "allez": 49137, "whichever": 49138, "tenstein": 49139, 
"discer": 49140, "organiser": 49141, "mtg": 49142, "dreamliner": 49143, "tse": 49144, "hokkaido": 49145, "mok": 49146, "indulgent": 49147, "hickman": 49148, "blinded": 49149, "alyn": 49150, "aaaah": 49151, "spool": 49152, "loughborough": 49153, "interpret": 49154, "etv": 49155, "aristotle": 49156, "optimizing": 49157, "avicii": 49158, "madurai": 49159, "juli": 49160, "nawaz": 49161, "matchups": 49162, "abide": 49163, "painting": 49164, "welling": 49165, "veli": 49166, "octagon": 49167, "inscribed": 49168, "poking": 49169, "placer": 49170, "lifecycle": 49171, "kilig": 49172, "gsp": 49173, "elives": 49174, "clements": 49175, "nasheed": 49176, "mesut": 49177, "incarcerated": 49178, "distilled": 49179, "walang": 49180, "delicacy": 49181, "delgado": 49182, "chez": 49183, "chita": 49184, "adero": 49185, "tux": 49186, "patil": 49187, "odo": 49188, "abhcosmetics": 49189, "tvc": 49190, "pbc": 49191, "inaccurate": 49192, "hardworkpaysoff": 49193, "baller": 49194, "quotation": 49195, "merchandising": 49196, "gastri": 49197, "defenses": 49198, "drogba": 49199, "bexhill": 49200, "bankno": 49201, "winona": 49202, "sieg": 49203, "pgs": 49204, "hahahha": 49205, "aguchi": 49206, "subram": 49207, "miracle": 49208, "desch": 49209, "libre": 49210, "bacher": 49211, "entine": 49212, "bbcradi": 49213, "loudest": 49214, "rps": 49215, "pierc": 49216, "fryer": 49217, "stormtrooper": 49218, "rafaelnadal": 49219, "pasco": 49220, "exhaustion": 49221, "epiconetsy": 49222, "rctid": 49223, "kellie": 49224, "gaines": 49225, "dbz": 49226, "smriti": 49227, "sbridge": 49228, "limited": 49229, "claw": 49230, "technical": 49231, "biographical": 49232, "adored": 49233, "ะ": 49234, "exclude": 49235, "acadia": 49236, "keyboards": 49237, "furman": 49238, "soca": 49239, "suru": 49240, "nips": 49241, "swaps": 49242, "serverless": 49243, "rune": 49244, "puffy": 49245, "northampton": 49246, "nishings": 49247, "hender": 49248, "cartridges": 49249, "gunshot": 49250, "ðŁĵ¹": 49251, "filament": 49252, 
"respondents": 49253, "peyton": 49254, "mountaineer": 49255, "merging": 49256, "lifespan": 49257, "intimidation": 49258, "pafc": 49259, "nlwx": 49260, "expansive": 49261, "purr": 49262, "fck": 49263, "cae": 49264, "atti": 49265, "telethon": 49266, "sohn": 49267, "mendel": 49268, "lopes": 49269, "dori": 49270, "unbroken": 49271, "tered": 49272, "tastings": 49273, "inactive": 49274, "disintegr": 49275, "tassel": 49276, "sharethe": 49277, "piano": 49278, "islay": 49279, "airspace": 49280, "zawa": 49281, "ricciardo": 49282, "mington": 49283, "fresher": 49284, "curry": 49285, "revs": 49286, "pharoah": 49287, "hmv": 49288, "exhilarating": 49289, "whoo": 49290, "linkin": 49291, "krispy": 49292, "competency": 49293, "stewards": 49294, "nebu": 49295, "katsu": 49296, "admins": 49297, "bazar": 49298, "asar": 49299, "givingback": 49300, "ssummit": 49301, "songz": 49302, "linus": 49303, "rajkumar": 49304, "farmington": 49305, "fantasia": 49306, "ðŁĺ“ðŁĺ“": 49307, "sobri": 49308, "lisse": 49309, "barrymore": 49310, "prism": 49311, "blob": 49312, "senew": 49313, "monoxide": 49314, "expire": 49315, "eighteen": 49316, "dipper": 49317, "xiao": 49318, "kilt": 49319, "hinch": 49320, "bbcsport": 49321, "bamboo": 49322, "pter": 49323, "exal": 49324, "ð٦ĭ": 49325, "hamlin": 49326, "expeditions": 49327, "stargazing": 49328, "foodsecurity": 49329, "wylie": 49330, "ulf": 49331, "stingly": 49332, "onstorm": 49333, "loeb": 49334, "broome": 49335, "bnha": 49336, "pancreatic": 49337, "elive": 49338, "!!!!!!!!!!!": 49339, "therapper": 49340, "orthopedic": 49341, "avengersendgame": 49342, "antitrust": 49343, "ìļ°": 49344, "gote": 49345, "omd": 49346, "offside": 49347, "gyllen": 49348, "wineries": 49349, "whitewater": 49350, "adl": 49351, "lupita": 49352, "exceeds": 49353, "consisted": 49354, "chewbacca": 49355, "ashleigh": 49356, "nhljets": 49357, "issan": 49358, "shld": 49359, "hayat": 49360, "cranberries": 49361, "ð٤ĺðŁı½": 49362, "rockthe": 49363, "springtraining": 49364, "fallout": 49365, 
"dairyfree": 49366, "waj": 49367, "undecided": 49368, "sown": 49369, "rcn": 49370, "northwales": 49371, "httr": 49372, "fumble": 49373, "dits": 49374, "compelled": 49375, "populist": 49376, "minted": 49377, "blanchett": 49378, ".''": 49379, "propulsion": 49380, "milla": 49381, "auberg": 49382, "hertz": 49383, "hta": 49384, "udaipur": 49385, "serendipity": 49386, "aztecs": 49387, "alsace": 49388, "ðŁIJij": 49389, "lun": 49390, "shoes": 49391, "charli": 49392, "garza": 49393, "ðŁēŁ": 49394, "probiotics": 49395, "foxtv": 49396, "olis": 49397, "miff": 49398, "localized": 49399, "diffuser": 49400, "sigue": 49401, "funko": 49402, "rendous": 49403, "ðŁēij": 49404, "jekyll": 49405, "<|startoftext|>": 49406, "<|endoftext|>": 49407} \ No newline at end of file diff --git a/pmid.hpp b/pmid.hpp new file mode 100644 index 000000000..ea9f02eb6 --- /dev/null +++ b/pmid.hpp @@ -0,0 +1,845 @@ +#ifndef __PMI_HPP__ +#define __PMI_HPP__ + +#include "ggml_extend.hpp" + +#include "clip.hpp" +#include "lora.hpp" + +struct FuseBlock : public GGMLBlock { + // network hparams + int in_dim; + int out_dim; + int hidden_dim; + bool use_residue; + +public: + FuseBlock(int i_d, int o_d, int h_d, bool use_residue = true) + : in_dim(i_d), out_dim(o_d), hidden_dim(h_d), use_residue(use_residue) { + blocks["fc1"] = std::shared_ptr(new Linear(in_dim, hidden_dim, true)); + blocks["fc2"] = std::shared_ptr(new Linear(hidden_dim, out_dim, true)); + blocks["layernorm"] = std::shared_ptr(new LayerNorm(in_dim)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [N, channels, h, w] + + auto fc1 = std::dynamic_pointer_cast(blocks["fc1"]); + auto fc2 = std::dynamic_pointer_cast(blocks["fc2"]); + auto layer_norm = std::dynamic_pointer_cast(blocks["layernorm"]); + + struct ggml_tensor* r = x; + // x = ggml_nn_layer_norm(ctx, x, ln_w, ln_b); + x = layer_norm->forward(ctx, x); + // x = ggml_add(ctx, ggml_mul_mat(ctx, fc1_w, x), fc1_b); + x = fc1->forward(ctx, x); + x = 
ggml_gelu_inplace(ctx, x); + x = fc2->forward(ctx, x); + // x = ggml_add(ctx, ggml_mul_mat(ctx, fc2_w, x), fc2_b); + if (use_residue) + x = ggml_add(ctx, x, r); + return x; + } +}; + +/* +class QFormerPerceiver(nn.Module): + def __init__(self, id_embeddings_dim, cross_attention_dim, num_tokens, embedding_dim=1024, use_residual=True, ratio=4): + super().__init__() + + self.num_tokens = num_tokens + self.cross_attention_dim = cross_attention_dim + self.use_residual = use_residual + print(cross_attention_dim*num_tokens) + self.token_proj = nn.Sequential( + nn.Linear(id_embeddings_dim, id_embeddings_dim*ratio), + nn.GELU(), + nn.Linear(id_embeddings_dim*ratio, cross_attention_dim*num_tokens), + ) + self.token_norm = nn.LayerNorm(cross_attention_dim) + self.perceiver_resampler = FacePerceiverResampler( + dim=cross_attention_dim, + depth=4, + dim_head=128, + heads=cross_attention_dim // 128, + embedding_dim=embedding_dim, + output_dim=cross_attention_dim, + ff_mult=4, + ) + + def forward(self, x, last_hidden_state): + x = self.token_proj(x) + x = x.reshape(-1, self.num_tokens, self.cross_attention_dim) + x = self.token_norm(x) # cls token + out = self.perceiver_resampler(x, last_hidden_state) # retrieve from patch tokens + if self.use_residual: # TODO: if use_residual is not true + out = x + 1.0 * out + return out +*/ + +struct PMFeedForward : public GGMLBlock { + // network hparams + int dim; + +public: + PMFeedForward(int d, int multi = 4) + : dim(d) { + int inner_dim = dim * multi; + blocks["0"] = std::shared_ptr(new LayerNorm(dim)); + blocks["1"] = std::shared_ptr(new Mlp(dim, inner_dim, dim, false)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* x) { + auto norm = std::dynamic_pointer_cast(blocks["0"]); + auto ff = std::dynamic_pointer_cast(blocks["1"]); + + x = norm->forward(ctx, x); + x = ff->forward(ctx, x); + return x; + } +}; + +struct PerceiverAttention : public GGMLBlock { + // network hparams + float scale; // = 
dim_head**-0.5 + int dim_head; // = dim_head + int heads; // = heads +public: + PerceiverAttention(int dim, int dim_h = 64, int h = 8) + : scale(powf(dim_h, -0.5)), dim_head(dim_h), heads(h) { + int inner_dim = dim_head * heads; + blocks["norm1"] = std::shared_ptr(new LayerNorm(dim)); + blocks["norm2"] = std::shared_ptr(new LayerNorm(dim)); + blocks["to_q"] = std::shared_ptr(new Linear(dim, inner_dim, false)); + blocks["to_kv"] = std::shared_ptr(new Linear(dim, inner_dim * 2, false)); + blocks["to_out"] = std::shared_ptr(new Linear(inner_dim, dim, false)); + } + + struct ggml_tensor* reshape_tensor(struct ggml_context* ctx, + struct ggml_tensor* x, + int heads) { + int64_t ne[4]; + for (int i = 0; i < 4; ++i) + ne[i] = x->ne[i]; + // print_ggml_tensor(x, true, "PerceiverAttention reshape x 0: "); + // printf("heads = %d \n", heads); + // x = ggml_view_4d(ctx, x, x->ne[0], x->ne[1], heads, x->ne[2]/heads, + // x->nb[1], x->nb[2], x->nb[3], 0); + x = ggml_reshape_4d(ctx, x, x->ne[0] / heads, heads, x->ne[1], x->ne[2]); + // x = ggml_view_4d(ctx, x, x->ne[0]/heads, heads, x->ne[1], x->ne[2], + // x->nb[1], x->nb[2], x->nb[3], 0); + // x = ggml_cont(ctx, x); + x = ggml_cont(ctx, ggml_permute(ctx, x, 0, 2, 1, 3)); + // print_ggml_tensor(x, true, "PerceiverAttention reshape x 1: "); + // x = ggml_reshape_4d(ctx, x, ne[0], heads, ne[1], ne[2]/heads); + return x; + } + + std::vector chunk_half(struct ggml_context* ctx, + struct ggml_tensor* x) { + auto tlo = ggml_view_4d(ctx, x, x->ne[0] / 2, x->ne[1], x->ne[2], x->ne[3], x->nb[1], x->nb[2], x->nb[3], 0); + auto tli = ggml_view_4d(ctx, x, x->ne[0] / 2, x->ne[1], x->ne[2], x->ne[3], x->nb[1], x->nb[2], x->nb[3], x->nb[0] * x->ne[0] / 2); + return {ggml_cont(ctx, tlo), + ggml_cont(ctx, tli)}; + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* latents) { + // x (torch.Tensor): image features + // shape (b, n1, D) + // latent (torch.Tensor): latent features + // shape 
(b, n2, D) + int64_t ne[4]; + for (int i = 0; i < 4; ++i) + ne[i] = latents->ne[i]; + + auto norm1 = std::dynamic_pointer_cast(blocks["norm1"]); + auto norm2 = std::dynamic_pointer_cast(blocks["norm2"]); + x = norm1->forward(ctx, x); + latents = norm2->forward(ctx, latents); + auto to_q = std::dynamic_pointer_cast(blocks["to_q"]); + auto q = to_q->forward(ctx, latents); + + auto kv_input = ggml_concat(ctx, x, latents, 1); + auto to_kv = std::dynamic_pointer_cast(blocks["to_kv"]); + auto kv = to_kv->forward(ctx, kv_input); + auto k = ggml_view_4d(ctx, kv, kv->ne[0] / 2, kv->ne[1], kv->ne[2], kv->ne[3], kv->nb[1] / 2, kv->nb[2] / 2, kv->nb[3] / 2, 0); + auto v = ggml_view_4d(ctx, kv, kv->ne[0] / 2, kv->ne[1], kv->ne[2], kv->ne[3], kv->nb[1] / 2, kv->nb[2] / 2, kv->nb[3] / 2, kv->nb[0] * (kv->ne[0] / 2)); + k = ggml_cont(ctx, k); + v = ggml_cont(ctx, v); + q = reshape_tensor(ctx, q, heads); + k = reshape_tensor(ctx, k, heads); + v = reshape_tensor(ctx, v, heads); + scale = 1.f / sqrt(sqrt((float)dim_head)); + k = ggml_scale_inplace(ctx, k, scale); + q = ggml_scale_inplace(ctx, q, scale); + // auto weight = ggml_mul_mat(ctx, q, k); + auto weight = ggml_mul_mat(ctx, k, q); // NOTE order of mul is opposite to pytorch + + // GGML's softmax() is equivalent to pytorch's softmax(x, dim=-1) + // in this case, dimension along which Softmax will be computed is the last dim + // in torch and the first dim in GGML, consistent with the convention that pytorch's + // last dimension (varying most rapidly) corresponds to GGML's first (varying most rapidly). 
+ // weight = ggml_soft_max(ctx, weight); + weight = ggml_soft_max_inplace(ctx, weight); + v = ggml_cont(ctx, ggml_transpose(ctx, v)); + // auto out = ggml_mul_mat(ctx, weight, v); + auto out = ggml_mul_mat(ctx, v, weight); // NOTE order of mul is opposite to pytorch + out = ggml_cont(ctx, ggml_permute(ctx, out, 0, 2, 1, 3)); + out = ggml_reshape_3d(ctx, out, ne[0], ne[1], ggml_nelements(out) / (ne[0] * ne[1])); + auto to_out = std::dynamic_pointer_cast(blocks["to_out"]); + out = to_out->forward(ctx, out); + return out; + } +}; + +struct FacePerceiverResampler : public GGMLBlock { + // network hparams + int depth; + +public: + FacePerceiverResampler(int dim = 768, + int d = 4, + int dim_head = 64, + int heads = 16, + int embedding_dim = 1280, + int output_dim = 768, + int ff_mult = 4) + : depth(d) { + blocks["proj_in"] = std::shared_ptr(new Linear(embedding_dim, dim, true)); + blocks["proj_out"] = std::shared_ptr(new Linear(dim, output_dim, true)); + blocks["norm_out"] = std::shared_ptr(new LayerNorm(output_dim)); + + for (int i = 0; i < depth; i++) { + std::string name = "layers." + std::to_string(i) + ".0"; + blocks[name] = std::shared_ptr(new PerceiverAttention(dim, dim_head, heads)); + name = "layers." + std::to_string(i) + ".1"; + blocks[name] = std::shared_ptr(new PMFeedForward(dim, ff_mult)); + } + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* latents, + struct ggml_tensor* x) { + // x: [N, channels, h, w] + auto proj_in = std::dynamic_pointer_cast(blocks["proj_in"]); + auto proj_out = std::dynamic_pointer_cast(blocks["proj_out"]); + auto norm_out = std::dynamic_pointer_cast(blocks["norm_out"]); + + x = proj_in->forward(ctx, x); + for (int i = 0; i < depth; i++) { + std::string name = "layers." + std::to_string(i) + ".0"; + auto attn = std::dynamic_pointer_cast(blocks[name]); + name = "layers." 
+ std::to_string(i) + ".1"; + auto ff = std::dynamic_pointer_cast(blocks[name]); + auto t = attn->forward(ctx, x, latents); + latents = ggml_add(ctx, t, latents); + t = ff->forward(ctx, latents); + latents = ggml_add(ctx, t, latents); + } + latents = proj_out->forward(ctx, latents); + latents = norm_out->forward(ctx, latents); + return latents; + } +}; + +struct QFormerPerceiver : public GGMLBlock { + // network hparams + int num_tokens; + int cross_attention_dim; + bool use_residul; + +public: + QFormerPerceiver(int id_embeddings_dim, int cross_attention_d, int num_t, int embedding_dim = 1024, bool use_r = true, int ratio = 4) + : cross_attention_dim(cross_attention_d), num_tokens(num_t), use_residul(use_r) { + blocks["token_proj"] = std::shared_ptr(new Mlp(id_embeddings_dim, + id_embeddings_dim * ratio, + cross_attention_dim * num_tokens, + true)); + blocks["token_norm"] = std::shared_ptr(new LayerNorm(cross_attention_d)); + blocks["perceiver_resampler"] = std::shared_ptr(new FacePerceiverResampler( + cross_attention_dim, + 4, + 128, + cross_attention_dim / 128, + embedding_dim, + cross_attention_dim, + 4)); + } + + /* + def forward(self, x, last_hidden_state): + x = self.token_proj(x) + x = x.reshape(-1, self.num_tokens, self.cross_attention_dim) + x = self.token_norm(x) # cls token + out = self.perceiver_resampler(x, last_hidden_state) # retrieve from patch tokens + if self.use_residual: # TODO: if use_residual is not true + out = x + 1.0 * out + return out + */ + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* last_hidden_state) { + // x: [N, channels, h, w] + auto token_proj = std::dynamic_pointer_cast(blocks["token_proj"]); + auto token_norm = std::dynamic_pointer_cast(blocks["token_norm"]); + auto perceiver_resampler = std::dynamic_pointer_cast(blocks["perceiver_resampler"]); + + x = token_proj->forward(ctx, x); + int64_t nel = ggml_nelements(x); + x = ggml_reshape_3d(ctx, x, cross_attention_dim, 
num_tokens, nel / (cross_attention_dim * num_tokens)); + x = token_norm->forward(ctx, x); + struct ggml_tensor* out = perceiver_resampler->forward(ctx, x, last_hidden_state); + if (use_residul) + out = ggml_add(ctx, x, out); + return out; + } +}; + +/* +class FacePerceiverResampler(torch.nn.Module): + def __init__( + self, + *, + dim=768, + depth=4, + dim_head=64, + heads=16, + embedding_dim=1280, + output_dim=768, + ff_mult=4, + ): + super().__init__() + + self.proj_in = torch.nn.Linear(embedding_dim, dim) + self.proj_out = torch.nn.Linear(dim, output_dim) + self.norm_out = torch.nn.LayerNorm(output_dim) + self.layers = torch.nn.ModuleList([]) + for _ in range(depth): + self.layers.append( + torch.nn.ModuleList( + [ + PerceiverAttention(dim=dim, dim_head=dim_head, heads=heads), + FeedForward(dim=dim, mult=ff_mult), + ] + ) + ) + + def forward(self, latents, x): + x = self.proj_in(x) + for attn, ff in self.layers: + latents = attn(x, latents) + latents + latents = ff(latents) + latents + latents = self.proj_out(latents) + return self.norm_out(latents) +*/ + +/* + +def FeedForward(dim, mult=4): + inner_dim = int(dim * mult) + return nn.Sequential( + nn.LayerNorm(dim), + nn.Linear(dim, inner_dim, bias=False), + nn.GELU(), + nn.Linear(inner_dim, dim, bias=False), + ) + +def reshape_tensor(x, heads): + bs, length, width = x.shape + # (bs, length, width) --> (bs, length, n_heads, dim_per_head) + x = x.view(bs, length, heads, -1) + # (bs, length, n_heads, dim_per_head) --> (bs, n_heads, length, dim_per_head) + x = x.transpose(1, 2) + # (bs, n_heads, length, dim_per_head) --> (bs*n_heads, length, dim_per_head) + x = x.reshape(bs, heads, length, -1) + return x + +class PerceiverAttention(nn.Module): + def __init__(self, *, dim, dim_head=64, heads=8): + super().__init__() + self.scale = dim_head**-0.5 + self.dim_head = dim_head + self.heads = heads + inner_dim = dim_head * heads + + self.norm1 = nn.LayerNorm(dim) + self.norm2 = nn.LayerNorm(dim) + + self.to_q = 
nn.Linear(dim, inner_dim, bias=False) + self.to_kv = nn.Linear(dim, inner_dim * 2, bias=False) + self.to_out = nn.Linear(inner_dim, dim, bias=False) + + def forward(self, x, latents): + """ + Args: + x (torch.Tensor): image features + shape (b, n1, D) + latent (torch.Tensor): latent features + shape (b, n2, D) + """ + x = self.norm1(x) + latents = self.norm2(latents) + + b, l, _ = latents.shape + + q = self.to_q(latents) + kv_input = torch.cat((x, latents), dim=-2) + k, v = self.to_kv(kv_input).chunk(2, dim=-1) + + q = reshape_tensor(q, self.heads) + k = reshape_tensor(k, self.heads) + v = reshape_tensor(v, self.heads) + + # attention + scale = 1 / math.sqrt(math.sqrt(self.dim_head)) + weight = (q * scale) @ (k * scale).transpose(-2, -1) # More stable with f16 than dividing afterwards + weight = torch.softmax(weight.float(), dim=-1).type(weight.dtype) + out = weight @ v + + out = out.permute(0, 2, 1, 3).reshape(b, l, -1) + + return self.to_out(out) + +*/ + +struct FuseModule : public GGMLBlock { + // network hparams + int embed_dim; + +public: + FuseModule(int imb_d) + : embed_dim(imb_d) { + blocks["mlp1"] = std::shared_ptr(new FuseBlock(imb_d * 2, imb_d, imb_d, false)); + blocks["mlp2"] = std::shared_ptr(new FuseBlock(imb_d, imb_d, imb_d, true)); + blocks["layer_norm"] = std::shared_ptr(new LayerNorm(embed_dim)); + } + + struct ggml_tensor* fuse_fn(struct ggml_context* ctx, + struct ggml_tensor* prompt_embeds, + struct ggml_tensor* id_embeds) { + auto mlp1 = std::dynamic_pointer_cast(blocks["mlp1"]); + auto mlp2 = std::dynamic_pointer_cast(blocks["mlp2"]); + auto layer_norm = std::dynamic_pointer_cast(blocks["layer_norm"]); + + // print_ggml_tensor(id_embeds, true, "Fuseblock id_embeds: "); + // print_ggml_tensor(prompt_embeds, true, "Fuseblock prompt_embeds: "); + + // auto prompt_embeds0 = ggml_cont(ctx, ggml_permute(ctx, prompt_embeds, 2, 0, 1, 3)); + // auto id_embeds0 = ggml_cont(ctx, ggml_permute(ctx, id_embeds, 2, 0, 1, 3)); + // 
print_ggml_tensor(id_embeds0, true, "Fuseblock id_embeds0: "); + // print_ggml_tensor(prompt_embeds0, true, "Fuseblock prompt_embeds0: "); + // concat is along dim 2 + // auto stacked_id_embeds = ggml_concat(ctx, prompt_embeds0, id_embeds0, 2); + auto stacked_id_embeds = ggml_concat(ctx, prompt_embeds, id_embeds, 0); + // print_ggml_tensor(stacked_id_embeds, true, "Fuseblock stacked_id_embeds 0: "); + // stacked_id_embeds = ggml_cont(ctx, ggml_permute(ctx, stacked_id_embeds, 1, 2, 0, 3)); + // print_ggml_tensor(stacked_id_embeds, true, "Fuseblock stacked_id_embeds 1: "); + // stacked_id_embeds = mlp1.forward(ctx, stacked_id_embeds); + // stacked_id_embeds = ggml_add(ctx, stacked_id_embeds, prompt_embeds); + // stacked_id_embeds = mlp2.forward(ctx, stacked_id_embeds); + // stacked_id_embeds = ggml_nn_layer_norm(ctx, stacked_id_embeds, ln_w, ln_b); + + stacked_id_embeds = mlp1->forward(ctx, stacked_id_embeds); + stacked_id_embeds = ggml_add(ctx, stacked_id_embeds, prompt_embeds); + stacked_id_embeds = mlp2->forward(ctx, stacked_id_embeds); + stacked_id_embeds = layer_norm->forward(ctx, stacked_id_embeds); + + // print_ggml_tensor(stacked_id_embeds, true, "Fuseblock stacked_id_embeds 1: "); + + return stacked_id_embeds; + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* prompt_embeds, + struct ggml_tensor* id_embeds, + struct ggml_tensor* class_tokens_mask, + struct ggml_tensor* class_tokens_mask_pos, + struct ggml_tensor* left, + struct ggml_tensor* right) { + // x: [N, channels, h, w] + + struct ggml_tensor* valid_id_embeds = id_embeds; + // # slice out the image token embeddings + // print_ggml_tensor(class_tokens_mask_pos, false); + ggml_set_name(class_tokens_mask_pos, "class_tokens_mask_pos"); + ggml_set_name(prompt_embeds, "prompt_embeds"); + // print_ggml_tensor(valid_id_embeds, true, "valid_id_embeds"); + // print_ggml_tensor(class_tokens_mask_pos, true, "class_tokens_mask_pos"); + struct ggml_tensor* image_token_embeds = 
ggml_get_rows(ctx, prompt_embeds, class_tokens_mask_pos); + ggml_set_name(image_token_embeds, "image_token_embeds"); + valid_id_embeds = ggml_reshape_2d(ctx, valid_id_embeds, valid_id_embeds->ne[0], + ggml_nelements(valid_id_embeds) / valid_id_embeds->ne[0]); + struct ggml_tensor* stacked_id_embeds = fuse_fn(ctx, image_token_embeds, valid_id_embeds); + + // stacked_id_embeds = ggml_cont(ctx, ggml_permute(ctx, stacked_id_embeds, 0, 2, 1, 3)); + // print_ggml_tensor(stacked_id_embeds, true, "AA stacked_id_embeds"); + // print_ggml_tensor(left, true, "AA left"); + // print_ggml_tensor(right, true, "AA right"); + if (left && right) { + stacked_id_embeds = ggml_concat(ctx, left, stacked_id_embeds, 1); + stacked_id_embeds = ggml_concat(ctx, stacked_id_embeds, right, 1); + } else if (left) { + stacked_id_embeds = ggml_concat(ctx, left, stacked_id_embeds, 1); + } else if (right) { + stacked_id_embeds = ggml_concat(ctx, stacked_id_embeds, right, 1); + } + // print_ggml_tensor(stacked_id_embeds, true, "BB stacked_id_embeds"); + // stacked_id_embeds = ggml_cont(ctx, ggml_permute(ctx, stacked_id_embeds, 0, 2, 1, 3)); + // print_ggml_tensor(stacked_id_embeds, true, "CC stacked_id_embeds"); + class_tokens_mask = ggml_cont(ctx, ggml_transpose(ctx, class_tokens_mask)); + class_tokens_mask = ggml_repeat(ctx, class_tokens_mask, prompt_embeds); + prompt_embeds = ggml_mul(ctx, prompt_embeds, class_tokens_mask); + struct ggml_tensor* updated_prompt_embeds = ggml_add(ctx, prompt_embeds, stacked_id_embeds); + ggml_set_name(updated_prompt_embeds, "updated_prompt_embeds"); + // print_ggml_tensor(updated_prompt_embeds, true, "updated_prompt_embeds: "); + return updated_prompt_embeds; + } +}; + +struct PhotoMakerIDEncoderBlock : public CLIPVisionModelProjection { + PhotoMakerIDEncoderBlock() + : CLIPVisionModelProjection(OPENAI_CLIP_VIT_L_14) { + blocks["visual_projection_2"] = std::shared_ptr(new Linear(1024, 1280, false)); + blocks["fuse_module"] = std::shared_ptr(new FuseModule(2048)); + 
} + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* id_pixel_values, + struct ggml_tensor* prompt_embeds, + struct ggml_tensor* class_tokens_mask, + struct ggml_tensor* class_tokens_mask_pos, + struct ggml_tensor* left, + struct ggml_tensor* right) { + // x: [N, channels, h, w] + auto vision_model = std::dynamic_pointer_cast(blocks["vision_model"]); + auto visual_projection = std::dynamic_pointer_cast(blocks["visual_projection"]); + auto visual_projection_2 = std::dynamic_pointer_cast(blocks["visual_projection_2"]); + auto fuse_module = std::dynamic_pointer_cast(blocks["fuse_module"]); + + struct ggml_tensor* shared_id_embeds = vision_model->forward(ctx, id_pixel_values); // [N, hidden_size] + struct ggml_tensor* id_embeds = visual_projection->forward(ctx, shared_id_embeds); // [N, proj_dim(768)] + struct ggml_tensor* id_embeds_2 = visual_projection_2->forward(ctx, shared_id_embeds); // [N, 1280] + + id_embeds = ggml_cont(ctx, ggml_permute(ctx, id_embeds, 2, 0, 1, 3)); + id_embeds_2 = ggml_cont(ctx, ggml_permute(ctx, id_embeds_2, 2, 0, 1, 3)); + + id_embeds = ggml_concat(ctx, id_embeds, id_embeds_2, 2); // [batch_size, seq_length, 1, 2048] check whether concat at dim 2 is right + id_embeds = ggml_cont(ctx, ggml_permute(ctx, id_embeds, 1, 2, 0, 3)); + + struct ggml_tensor* updated_prompt_embeds = fuse_module->forward(ctx, + prompt_embeds, + id_embeds, + class_tokens_mask, + class_tokens_mask_pos, + left, right); + return updated_prompt_embeds; + } +}; + +struct PhotoMakerIDEncoder_CLIPInsightfaceExtendtokenBlock : public CLIPVisionModelProjection { + int cross_attention_dim; + int num_tokens; + + PhotoMakerIDEncoder_CLIPInsightfaceExtendtokenBlock(int id_embeddings_dim = 512) + : CLIPVisionModelProjection(OPENAI_CLIP_VIT_L_14), + cross_attention_dim(2048), + num_tokens(2) { + blocks["visual_projection_2"] = std::shared_ptr(new Linear(1024, 1280, false)); + blocks["fuse_module"] = std::shared_ptr(new FuseModule(2048)); + /* + 
cross_attention_dim = 2048 + # projection + self.num_tokens = 2 + self.cross_attention_dim = cross_attention_dim + self.qformer_perceiver = QFormerPerceiver( + id_embeddings_dim, + cross_attention_dim, + self.num_tokens, + )*/ + blocks["qformer_perceiver"] = std::shared_ptr(new QFormerPerceiver(id_embeddings_dim, + cross_attention_dim, + num_tokens)); + } + + /* + def forward(self, id_pixel_values, prompt_embeds, class_tokens_mask, id_embeds): + b, num_inputs, c, h, w = id_pixel_values.shape + id_pixel_values = id_pixel_values.view(b * num_inputs, c, h, w) + + last_hidden_state = self.vision_model(id_pixel_values)[0] + id_embeds = id_embeds.view(b * num_inputs, -1) + + id_embeds = self.qformer_perceiver(id_embeds, last_hidden_state) + id_embeds = id_embeds.view(b, num_inputs, self.num_tokens, -1) + updated_prompt_embeds = self.fuse_module(prompt_embeds, id_embeds, class_tokens_mask) + */ + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* id_pixel_values, + struct ggml_tensor* prompt_embeds, + struct ggml_tensor* class_tokens_mask, + struct ggml_tensor* class_tokens_mask_pos, + struct ggml_tensor* id_embeds, + struct ggml_tensor* left, + struct ggml_tensor* right) { + // x: [N, channels, h, w] + auto vision_model = std::dynamic_pointer_cast(blocks["vision_model"]); + auto fuse_module = std::dynamic_pointer_cast(blocks["fuse_module"]); + auto qformer_perceiver = std::dynamic_pointer_cast(blocks["qformer_perceiver"]); + + // struct ggml_tensor* last_hidden_state = vision_model->forward(ctx, id_pixel_values); // [N, hidden_size] + struct ggml_tensor* last_hidden_state = vision_model->forward(ctx, id_pixel_values, false); // [N, hidden_size] + id_embeds = qformer_perceiver->forward(ctx, id_embeds, last_hidden_state); + + struct ggml_tensor* updated_prompt_embeds = fuse_module->forward(ctx, + prompt_embeds, + id_embeds, + class_tokens_mask, + class_tokens_mask_pos, + left, right); + return updated_prompt_embeds; + } +}; + +struct 
PhotoMakerIDEncoder : public GGMLRunner { +public: + SDVersion version = VERSION_SDXL; + PMVersion pm_version = PM_VERSION_1; + PhotoMakerIDEncoderBlock id_encoder; + PhotoMakerIDEncoder_CLIPInsightfaceExtendtokenBlock id_encoder2; + float style_strength; + + std::vector ctm; + std::vector ctmf16; + std::vector ctmpos; + + std::vector zeros_left_16; + std::vector zeros_left; + std::vector zeros_right_16; + std::vector zeros_right; + +public: + PhotoMakerIDEncoder(ggml_backend_t backend, std::map& tensor_types, const std::string prefix, SDVersion version = VERSION_SDXL, PMVersion pm_v = PM_VERSION_1, float sty = 20.f) + : GGMLRunner(backend), + version(version), + pm_version(pm_v), + style_strength(sty) { + if (pm_version == PM_VERSION_1) { + id_encoder.init(params_ctx, tensor_types, prefix); + } else if (pm_version == PM_VERSION_2) { + id_encoder2.init(params_ctx, tensor_types, prefix); + } + } + + std::string get_desc() { + return "pmid"; + } + + PMVersion get_version() const { + return pm_version; + } + + void get_param_tensors(std::map& tensors, const std::string prefix) { + if (pm_version == PM_VERSION_1) + id_encoder.get_param_tensors(tensors, prefix); + else if (pm_version == PM_VERSION_2) + id_encoder2.get_param_tensors(tensors, prefix); + } + + struct ggml_cgraph* build_graph( // struct ggml_allocr* allocr, + struct ggml_tensor* id_pixel_values, + struct ggml_tensor* prompt_embeds, + std::vector& class_tokens_mask, + struct ggml_tensor* id_embeds) { + ctm.clear(); + ctmf16.clear(); + ctmpos.clear(); + zeros_left.clear(); + zeros_left_16.clear(); + zeros_right.clear(); + zeros_right_16.clear(); + + ggml_context* ctx0 = compute_ctx; + + struct ggml_cgraph* gf = ggml_new_graph(compute_ctx); + + int64_t hidden_size = prompt_embeds->ne[0]; + int64_t seq_length = prompt_embeds->ne[1]; + ggml_type type = GGML_TYPE_F32; + + struct ggml_tensor* class_tokens_mask_d = ggml_new_tensor_1d(ctx0, type, class_tokens_mask.size()); + + struct ggml_tensor* id_pixel_values_d = 
to_backend(id_pixel_values); + struct ggml_tensor* prompt_embeds_d = to_backend(prompt_embeds); + struct ggml_tensor* id_embeds_d = to_backend(id_embeds); + + struct ggml_tensor* left = NULL; + struct ggml_tensor* right = NULL; + for (int i = 0; i < class_tokens_mask.size(); i++) { + if (class_tokens_mask[i]) { + // printf(" 1,"); + ctm.push_back(0.f); // here use 0.f instead of 1.f to make a scale mask + ctmf16.push_back(ggml_fp32_to_fp16(0.f)); // here use 0.f instead of 1.f to make a scale mask + ctmpos.push_back(i); + } else { + // printf(" 0,"); + ctm.push_back(1.f); // here use 1.f instead of 0.f to make a scale mask + ctmf16.push_back(ggml_fp32_to_fp16(1.f)); // here use 0.f instead of 1.f to make a scale mask + } + } + // printf("\n"); + if (ctmpos[0] > 0) { + // left = ggml_new_tensor_3d(ctx0, type, hidden_size, 1, ctmpos[0]); + left = ggml_new_tensor_3d(ctx0, type, hidden_size, ctmpos[0], 1); + } + if (ctmpos[ctmpos.size() - 1] < seq_length - 1) { + // right = ggml_new_tensor_3d(ctx0, type, + // hidden_size, 1, seq_length - ctmpos[ctmpos.size() - 1] - 1); + right = ggml_new_tensor_3d(ctx0, type, + hidden_size, seq_length - ctmpos[ctmpos.size() - 1] - 1, 1); + } + struct ggml_tensor* class_tokens_mask_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, ctmpos.size()); + + { + if (type == GGML_TYPE_F16) + set_backend_tensor_data(class_tokens_mask_d, ctmf16.data()); + else + set_backend_tensor_data(class_tokens_mask_d, ctm.data()); + set_backend_tensor_data(class_tokens_mask_pos, ctmpos.data()); + if (left) { + if (type == GGML_TYPE_F16) { + for (int i = 0; i < ggml_nelements(left); ++i) + zeros_left_16.push_back(ggml_fp32_to_fp16(0.f)); + set_backend_tensor_data(left, zeros_left_16.data()); + } else { + for (int i = 0; i < ggml_nelements(left); ++i) + zeros_left.push_back(0.f); + set_backend_tensor_data(left, zeros_left.data()); + } + } + if (right) { + if (type == GGML_TYPE_F16) { + for (int i = 0; i < ggml_nelements(right); ++i) + 
zeros_right_16.push_back(ggml_fp32_to_fp16(0.f)); + set_backend_tensor_data(right, zeros_right_16.data()); + } else { + for (int i = 0; i < ggml_nelements(right); ++i) + zeros_right.push_back(0.f); + set_backend_tensor_data(right, zeros_right.data()); + } + } + } + struct ggml_tensor* updated_prompt_embeds = NULL; + if (pm_version == PM_VERSION_1) + updated_prompt_embeds = id_encoder.forward(ctx0, + id_pixel_values_d, + prompt_embeds_d, + class_tokens_mask_d, + class_tokens_mask_pos, + left, right); + else if (pm_version == PM_VERSION_2) + updated_prompt_embeds = id_encoder2.forward(ctx0, + id_pixel_values_d, + prompt_embeds_d, + class_tokens_mask_d, + class_tokens_mask_pos, + id_embeds_d, + left, right); + + ggml_build_forward_expand(gf, updated_prompt_embeds); + + return gf; + } + + void compute(const int n_threads, + struct ggml_tensor* id_pixel_values, + struct ggml_tensor* prompt_embeds, + struct ggml_tensor* id_embeds, + std::vector& class_tokens_mask, + struct ggml_tensor** updated_prompt_embeds, + ggml_context* output_ctx) { + auto get_graph = [&]() -> struct ggml_cgraph* { + // return build_graph(compute_allocr, id_pixel_values, prompt_embeds, class_tokens_mask); + return build_graph(id_pixel_values, prompt_embeds, class_tokens_mask, id_embeds); + }; + + // GGMLRunner::compute(get_graph, n_threads, updated_prompt_embeds); + GGMLRunner::compute(get_graph, n_threads, true, updated_prompt_embeds, output_ctx); + } +}; + +struct PhotoMakerIDEmbed : public GGMLRunner { + std::map tensors; + std::string file_path; + ModelLoader* model_loader; + bool load_failed = false; + bool applied = false; + + PhotoMakerIDEmbed(ggml_backend_t backend, + ModelLoader* ml, + const std::string& file_path = "", + const std::string& prefix = "") + : file_path(file_path), GGMLRunner(backend), model_loader(ml) { + if (!model_loader->init_from_file(file_path, prefix)) { + load_failed = true; + } + } + + std::string get_desc() { + return "id_embeds"; + } + + bool load_from_file(bool 
filter_tensor = false) { + LOG_INFO("loading PhotoMaker ID Embeds from '%s'", file_path.c_str()); + + if (load_failed) { + LOG_ERROR("init photomaker id embed from file failed: '%s'", file_path.c_str()); + return false; + } + + bool dry_run = true; + auto on_new_tensor_cb = [&](const TensorStorage& tensor_storage, ggml_tensor** dst_tensor) -> bool { + const std::string& name = tensor_storage.name; + + if (filter_tensor && !contains(name, "pmid.id_embeds")) { + // LOG_INFO("skipping LoRA tesnor '%s'", name.c_str()); + return true; + } + if (dry_run) { + struct ggml_tensor* real = ggml_new_tensor(params_ctx, + tensor_storage.type, + tensor_storage.n_dims, + tensor_storage.ne); + tensors[name] = real; + } else { + auto real = tensors[name]; + *dst_tensor = real; + } + + return true; + }; + + model_loader->load_tensors(on_new_tensor_cb, backend); + alloc_params_buffer(); + + dry_run = false; + model_loader->load_tensors(on_new_tensor_cb, backend); + + LOG_DEBUG("finished loading PhotoMaker ID Embeds "); + return true; + } + + struct ggml_tensor* get() { + std::map::iterator pos; + pos = tensors.find("pmid.id_embeds"); + if (pos != tensors.end()) + return pos->second; + return NULL; + } +}; + +#endif // __PMI_HPP__ diff --git a/preprocessing.hpp b/preprocessing.hpp new file mode 100644 index 000000000..4ea1dbab9 --- /dev/null +++ b/preprocessing.hpp @@ -0,0 +1,227 @@ +#ifndef __PREPROCESSING_HPP__ +#define __PREPROCESSING_HPP__ + +#include "ggml_extend.hpp" +#define M_PI_ 3.14159265358979323846 + +void convolve(struct ggml_tensor* input, struct ggml_tensor* output, struct ggml_tensor* kernel, int padding) { + struct ggml_init_params params; + params.mem_size = 20 * 1024 * 1024; // 10 + params.mem_buffer = NULL; + params.no_alloc = false; + struct ggml_context* ctx0 = ggml_init(params); + struct ggml_tensor* kernel_fp16 = ggml_new_tensor_4d(ctx0, GGML_TYPE_F16, kernel->ne[0], kernel->ne[1], 1, 1); + ggml_fp32_to_fp16_row((float*)kernel->data, 
(ggml_fp16_t*)kernel_fp16->data, ggml_nelements(kernel)); + ggml_tensor* h = ggml_conv_2d(ctx0, kernel_fp16, input, 1, 1, padding, padding, 1, 1); + ggml_cgraph* gf = ggml_new_graph(ctx0); + ggml_build_forward_expand(gf, ggml_cpy(ctx0, h, output)); + ggml_graph_compute_with_ctx(ctx0, gf, 1); + ggml_free(ctx0); +} + +void gaussian_kernel(struct ggml_tensor* kernel) { + int ks_mid = kernel->ne[0] / 2; + float sigma = 1.4f; + float normal = 1.f / (2.0f * M_PI_ * powf(sigma, 2.0f)); + for (int y = 0; y < kernel->ne[0]; y++) { + float gx = -ks_mid + y; + for (int x = 0; x < kernel->ne[1]; x++) { + float gy = -ks_mid + x; + float k_ = expf(-((gx * gx + gy * gy) / (2.0f * powf(sigma, 2.0f)))) * normal; + ggml_tensor_set_f32(kernel, k_, x, y); + } + } +} + +void grayscale(struct ggml_tensor* rgb_img, struct ggml_tensor* grayscale) { + for (int iy = 0; iy < rgb_img->ne[1]; iy++) { + for (int ix = 0; ix < rgb_img->ne[0]; ix++) { + float r = ggml_tensor_get_f32(rgb_img, ix, iy); + float g = ggml_tensor_get_f32(rgb_img, ix, iy, 1); + float b = ggml_tensor_get_f32(rgb_img, ix, iy, 2); + float gray = 0.2989f * r + 0.5870f * g + 0.1140f * b; + ggml_tensor_set_f32(grayscale, gray, ix, iy); + } + } +} + +void prop_hypot(struct ggml_tensor* x, struct ggml_tensor* y, struct ggml_tensor* h) { + int n_elements = ggml_nelements(h); + float* dx = (float*)x->data; + float* dy = (float*)y->data; + float* dh = (float*)h->data; + for (int i = 0; i < n_elements; i++) { + dh[i] = sqrtf(dx[i] * dx[i] + dy[i] * dy[i]); + } +} + +void prop_arctan2(struct ggml_tensor* x, struct ggml_tensor* y, struct ggml_tensor* h) { + int n_elements = ggml_nelements(h); + float* dx = (float*)x->data; + float* dy = (float*)y->data; + float* dh = (float*)h->data; + for (int i = 0; i < n_elements; i++) { + dh[i] = atan2f(dy[i], dx[i]); + } +} + +void normalize_tensor(struct ggml_tensor* g) { + int n_elements = ggml_nelements(g); + float* dg = (float*)g->data; + float max = -INFINITY; + for (int i = 0; i < 
n_elements; i++) { + max = dg[i] > max ? dg[i] : max; + } + max = 1.0f / max; + for (int i = 0; i < n_elements; i++) { + dg[i] *= max; + } +} + +void non_max_supression(struct ggml_tensor* result, struct ggml_tensor* G, struct ggml_tensor* D) { + for (int iy = 1; iy < result->ne[1] - 1; iy++) { + for (int ix = 1; ix < result->ne[0] - 1; ix++) { + float angle = ggml_tensor_get_f32(D, ix, iy) * 180.0f / M_PI_; + angle = angle < 0.0f ? angle += 180.0f : angle; + float q = 1.0f; + float r = 1.0f; + + // angle 0 + if ((0 >= angle && angle < 22.5f) || (157.5f >= angle && angle <= 180)) { + q = ggml_tensor_get_f32(G, ix, iy + 1); + r = ggml_tensor_get_f32(G, ix, iy - 1); + } + // angle 45 + else if (22.5f >= angle && angle < 67.5f) { + q = ggml_tensor_get_f32(G, ix + 1, iy - 1); + r = ggml_tensor_get_f32(G, ix - 1, iy + 1); + } + // angle 90 + else if (67.5f >= angle && angle < 112.5) { + q = ggml_tensor_get_f32(G, ix + 1, iy); + r = ggml_tensor_get_f32(G, ix - 1, iy); + } + // angle 135 + else if (112.5 >= angle && angle < 157.5f) { + q = ggml_tensor_get_f32(G, ix - 1, iy - 1); + r = ggml_tensor_get_f32(G, ix + 1, iy + 1); + } + + float cur = ggml_tensor_get_f32(G, ix, iy); + if ((cur >= q) && (cur >= r)) { + ggml_tensor_set_f32(result, cur, ix, iy); + } else { + ggml_tensor_set_f32(result, 0.0f, ix, iy); + } + } + } +} + +void threshold_hystersis(struct ggml_tensor* img, float high_threshold, float low_threshold, float weak, float strong) { + int n_elements = ggml_nelements(img); + float* imd = (float*)img->data; + float max = -INFINITY; + for (int i = 0; i < n_elements; i++) { + max = imd[i] > max ? 
imd[i] : max; + } + float ht = max * high_threshold; + float lt = ht * low_threshold; + for (int i = 0; i < n_elements; i++) { + float img_v = imd[i]; + if (img_v >= ht) { // strong pixel + imd[i] = strong; + } else if (img_v <= ht && img_v >= lt) { // strong pixel + imd[i] = weak; + } + } + + for (int iy = 0; iy < img->ne[1]; iy++) { + for (int ix = 0; ix < img->ne[0]; ix++) { + if (ix >= 3 && ix <= img->ne[0] - 3 && iy >= 3 && iy <= img->ne[1] - 3) { + ggml_tensor_set_f32(img, ggml_tensor_get_f32(img, ix, iy), ix, iy); + } else { + ggml_tensor_set_f32(img, 0.0f, ix, iy); + } + } + } + + // hysteresis + for (int iy = 1; iy < img->ne[1] - 1; iy++) { + for (int ix = 1; ix < img->ne[0] - 1; ix++) { + float imd_v = ggml_tensor_get_f32(img, ix, iy); + if (imd_v == weak) { + if (ggml_tensor_get_f32(img, ix + 1, iy - 1) == strong || ggml_tensor_get_f32(img, ix + 1, iy) == strong || + ggml_tensor_get_f32(img, ix, iy - 1) == strong || ggml_tensor_get_f32(img, ix, iy + 1) == strong || + ggml_tensor_get_f32(img, ix - 1, iy - 1) == strong || ggml_tensor_get_f32(img, ix - 1, iy) == strong) { + ggml_tensor_set_f32(img, strong, ix, iy); + } else { + ggml_tensor_set_f32(img, 0.0f, ix, iy); + } + } + } + } +} + +uint8_t* preprocess_canny(uint8_t* img, int width, int height, float high_threshold, float low_threshold, float weak, float strong, bool inverse) { + struct ggml_init_params params; + params.mem_size = static_cast(10 * 1024 * 1024); // 10 + params.mem_buffer = NULL; + params.no_alloc = false; + struct ggml_context* work_ctx = ggml_init(params); + + if (!work_ctx) { + LOG_ERROR("ggml_init() failed"); + return NULL; + } + + float kX[9] = { + -1, 0, 1, + -2, 0, 2, + -1, 0, 1}; + + float kY[9] = { + 1, 2, 1, + 0, 0, 0, + -1, -2, -1}; + + // generate kernel + int kernel_size = 5; + struct ggml_tensor* gkernel = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, kernel_size, kernel_size, 1, 1); + struct ggml_tensor* sf_kx = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, 3, 3, 1, 1); + 
memcpy(sf_kx->data, kX, ggml_nbytes(sf_kx)); + struct ggml_tensor* sf_ky = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, 3, 3, 1, 1); + memcpy(sf_ky->data, kY, ggml_nbytes(sf_ky)); + gaussian_kernel(gkernel); + struct ggml_tensor* image = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, width, height, 3, 1); + struct ggml_tensor* image_gray = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, width, height, 1, 1); + struct ggml_tensor* iX = ggml_dup_tensor(work_ctx, image_gray); + struct ggml_tensor* iY = ggml_dup_tensor(work_ctx, image_gray); + struct ggml_tensor* G = ggml_dup_tensor(work_ctx, image_gray); + struct ggml_tensor* tetha = ggml_dup_tensor(work_ctx, image_gray); + sd_image_to_tensor(img, image); + grayscale(image, image_gray); + convolve(image_gray, image_gray, gkernel, 2); + convolve(image_gray, iX, sf_kx, 1); + convolve(image_gray, iY, sf_ky, 1); + prop_hypot(iX, iY, G); + normalize_tensor(G); + prop_arctan2(iX, iY, tetha); + non_max_supression(image_gray, G, tetha); + threshold_hystersis(image_gray, high_threshold, low_threshold, weak, strong); + // to RGB channels + for (int iy = 0; iy < height; iy++) { + for (int ix = 0; ix < width; ix++) { + float gray = ggml_tensor_get_f32(image_gray, ix, iy); + gray = inverse ? 
1.0f - gray : gray; + ggml_tensor_set_f32(image, gray, ix, iy); + ggml_tensor_set_f32(image, gray, ix, iy, 1); + ggml_tensor_set_f32(image, gray, ix, iy, 2); + } + } + free(img); + uint8_t* output = sd_tensor_to_image(image); + ggml_free(work_ctx); + return output; +} + +#endif // __PREPROCESSING_HPP__ \ No newline at end of file diff --git a/rng.hpp b/rng.hpp new file mode 100644 index 000000000..3340be618 --- /dev/null +++ b/rng.hpp @@ -0,0 +1,35 @@ +#ifndef __RNG_H__ +#define __RNG_H__ + +#include +#include + +class RNG { +public: + virtual void manual_seed(uint64_t seed) = 0; + virtual std::vector randn(uint32_t n) = 0; +}; + +class STDDefaultRNG : public RNG { +private: + std::default_random_engine generator; + +public: + void manual_seed(uint64_t seed) { + generator.seed((unsigned int)seed); + } + + std::vector randn(uint32_t n) { + std::vector result; + float mean = 0.0; + float stddev = 1.0; + std::normal_distribution distribution(mean, stddev); + for (uint32_t i = 0; i < n; i++) { + float random_number = distribution(generator); + result.push_back(random_number); + } + return result; + } +}; + +#endif // __RNG_H__ \ No newline at end of file diff --git a/rng_philox.hpp b/rng_philox.hpp new file mode 100644 index 000000000..33fea9c5b --- /dev/null +++ b/rng_philox.hpp @@ -0,0 +1,125 @@ +#ifndef __RNG_PHILOX_H__ +#define __RNG_PHILOX_H__ + +#include +#include + +#include "rng.hpp" + +// RNG imitiating torch cuda randn on CPU. 
+// Port from: https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/5ef669de080814067961f28357256e8fe27544f4/modules/rng_philox.py +class PhiloxRNG : public RNG { +private: + uint64_t seed; + uint32_t offset; + +private: + std::vector philox_m = {0xD2511F53, 0xCD9E8D57}; + std::vector philox_w = {0x9E3779B9, 0xBB67AE85}; + float two_pow32_inv = 2.3283064e-10f; + float two_pow32_inv_2pi = 2.3283064e-10f * 6.2831855f; + + std::vector uint32(uint64_t x) { + std::vector result(2); + result[0] = static_cast(x & 0xFFFFFFFF); + result[1] = static_cast(x >> 32); + return result; + } + + std::vector> uint32(const std::vector& x) { + uint32_t N = (uint32_t)x.size(); + std::vector> result(2, std::vector(N)); + + for (uint32_t i = 0; i < N; ++i) { + result[0][i] = static_cast(x[i] & 0xFFFFFFFF); + result[1][i] = static_cast(x[i] >> 32); + } + + return result; + } + + // A single round of the Philox 4x32 random number generator. + void philox4_round(std::vector>& counter, + const std::vector>& key) { + uint32_t N = (uint32_t)counter[0].size(); + for (uint32_t i = 0; i < N; i++) { + std::vector v1 = uint32(static_cast(counter[0][i]) * static_cast(philox_m[0])); + std::vector v2 = uint32(static_cast(counter[2][i]) * static_cast(philox_m[1])); + + counter[0][i] = v2[1] ^ counter[1][i] ^ key[0][i]; + counter[1][i] = v2[0]; + counter[2][i] = v1[1] ^ counter[3][i] ^ key[1][i]; + counter[3][i] = v1[0]; + } + } + + // Generates 32-bit random numbers using the Philox 4x32 random number generator. + // Parameters: + // counter : A 4xN array of 32-bit integers representing the counter values (offset into generation). + // key : A 2xN array of 32-bit integers representing the key values (seed). + // rounds : The number of rounds to perform. + // Returns: + // std::vector>: A 4xN array of 32-bit integers containing the generated random numbers. 
+ std::vector> philox4_32(std::vector>& counter, + std::vector>& key, + int rounds = 10) { + uint32_t N = (uint32_t)counter[0].size(); + for (int i = 0; i < rounds - 1; ++i) { + philox4_round(counter, key); + + for (uint32_t j = 0; j < N; ++j) { + key[0][j] += philox_w[0]; + key[1][j] += philox_w[1]; + } + } + + philox4_round(counter, key); + return counter; + } + + float box_muller(float x, float y) { + float u = x * two_pow32_inv + two_pow32_inv / 2; + float v = y * two_pow32_inv_2pi + two_pow32_inv_2pi / 2; + + float s = sqrt(-2.0f * log(u)); + + float r1 = s * sin(v); + return r1; + } + +public: + PhiloxRNG(uint64_t seed = 0) { + this->seed = seed; + this->offset = 0; + } + + void manual_seed(uint64_t seed) { + this->seed = seed; + this->offset = 0; + } + + std::vector randn(uint32_t n) { + std::vector> counter(4, std::vector(n, 0)); + for (uint32_t i = 0; i < n; i++) { + counter[0][i] = this->offset; + } + + for (uint32_t i = 0; i < n; i++) { + counter[2][i] = i; + } + this->offset += 1; + + std::vector key(n, this->seed); + std::vector> key_uint32 = uint32(key); + + std::vector> g = philox4_32(counter, key_uint32); + + std::vector result; + for (uint32_t i = 0; i < n; ++i) { + result.push_back(box_muller((float)g[0][i], (float)g[1][i])); + } + return result; + } +}; + +#endif // __RNG_PHILOX_H__ \ No newline at end of file diff --git a/stable-diffusion.cpp b/stable-diffusion.cpp index 4c6b33f46..9c8265727 100644 --- a/stable-diffusion.cpp +++ b/stable-diffusion.cpp @@ -1,3405 +1,1069 @@ -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include "ggml/ggml.h" -#include "stable-diffusion.h" - -static SDLogLevel log_level = SDLogLevel::INFO; - -#define __FILENAME__ "stable-diffusion.cpp" -#define SD_LOG(level, format, ...) 
\ - do { \ - if (level < log_level) { \ - break; \ - } \ - if (level == SDLogLevel::DEBUG) { \ - printf("[DEBUG] %s:%-4d - " format "\n", __FILENAME__, __LINE__, ##__VA_ARGS__); \ - } else if (level == SDLogLevel::INFO) { \ - printf("[INFO] %s:%-4d - " format "\n", __FILENAME__, __LINE__, ##__VA_ARGS__); \ - } else if (level == SDLogLevel::WARN) { \ - fprintf(stderr, "[WARN] %s:%-4d - " format "\n", __FILENAME__, __LINE__, ##__VA_ARGS__); \ - } else if (level == SDLogLevel::ERROR) { \ - fprintf(stderr, "[ERROR] %s:%-4d - " format "\n", __FILENAME__, __LINE__, ##__VA_ARGS__); \ - } \ - } while (0) - -#define LOG_DEBUG(format, ...) SD_LOG(SDLogLevel::DEBUG, format, ##__VA_ARGS__) -#define LOG_INFO(format, ...) SD_LOG(SDLogLevel::INFO, format, ##__VA_ARGS__) -#define LOG_WARN(format, ...) SD_LOG(SDLogLevel::WARN, format, ##__VA_ARGS__) -#define LOG_ERROR(format, ...) SD_LOG(SDLogLevel::ERROR, format, ##__VA_ARGS__) - -#define GGML_FILE_MAGIC 0x67676d6c - -#define TIMESTEPS 1000 - -/*================================================== Helper Functions ================================================*/ - -void set_sd_log_level(SDLogLevel level) { - log_level = level; -} - -std::string sd_get_system_info() { - std::stringstream ss; - ss << "System Info: \n"; - ss << " BLAS = " << ggml_cpu_has_blas() << std::endl; - ss << " SSE3 = " << ggml_cpu_has_sse3() << std::endl; - ss << " AVX = " << ggml_cpu_has_avx() << std::endl; - ss << " AVX2 = " << ggml_cpu_has_avx2() << std::endl; - ss << " AVX512 = " << ggml_cpu_has_avx512() << std::endl; - ss << " AVX512_VBMI = " << ggml_cpu_has_avx512_vbmi() << std::endl; - ss << " AVX512_VNNI = " << ggml_cpu_has_avx512_vnni() << std::endl; - ss << " FMA = " << ggml_cpu_has_fma() << std::endl; - ss << " NEON = " << ggml_cpu_has_neon() << std::endl; - ss << " ARM_FMA = " << ggml_cpu_has_arm_fma() << std::endl; - ss << " F16C = " << ggml_cpu_has_f16c() << std::endl; - ss << " FP16_VA = " << ggml_cpu_has_fp16_va() << std::endl; - ss << " 
WASM_SIMD = " << ggml_cpu_has_wasm_simd() << std::endl; - ss << " VSX = " << ggml_cpu_has_vsx() << std::endl; - return ss.str(); -} - -ggml_tensor* load_tensor_from_file(ggml_context* ctx, const std::string& file_path) { - std::ifstream file(file_path, std::ios::binary); - if (!file.is_open()) { - LOG_ERROR("failed to open '%s'", file_path.c_str()); - return NULL; - } - int32_t n_dims; - int32_t length; - int32_t ttype; - - file.read(reinterpret_cast(&n_dims), sizeof(n_dims)); - file.read(reinterpret_cast(&length), sizeof(length)); - file.read(reinterpret_cast(&ttype), sizeof(ttype)); - - if (file.eof()) { - LOG_ERROR("incomplete file '%s'", file_path.c_str()); - return NULL; - } - - int32_t nelements = 1; - int32_t ne[4] = {1, 1, 1, 1}; - for (int i = 0; i < n_dims; ++i) { - file.read(reinterpret_cast(&ne[i]), sizeof(ne[i])); - nelements *= ne[i]; - } - std::string name(length, 0); - file.read(&name[0], length); - ggml_tensor* tensor = ggml_new_tensor_4d(ctx, (ggml_type)ttype, ne[0], ne[1], ne[2], ne[3]); - const size_t bpe = ggml_type_size(ggml_type(ttype)); - file.read(reinterpret_cast(tensor->data), ggml_nbytes(tensor)); - return tensor; -} - -static std::default_random_engine generator; - -void set_random_seed(int seed) { - generator.seed(seed); -} - -void ggml_tensor_set_f32_randn(struct ggml_tensor* tensor) { - float mean = 0.0; - float stddev = 1.0; - std::normal_distribution distribution(mean, stddev); - for (int i = 0; i < ggml_nelements(tensor); i++) { - float random_number = distribution(generator); - ggml_set_f32_1d(tensor, i, random_number); - } -} - -// set tensor[i, j, k, l] -// set tensor[l] -// set tensor[k, l] -// set tensor[j, k, l] -void ggml_tensor_set_f32(struct ggml_tensor* tensor, float value, int l, int k = 0, int j = 0, int i = 0) { - GGML_ASSERT(tensor->nb[0] == sizeof(float)); - *(float*)((char*)(tensor->data) + i * tensor->nb[3] + j * tensor->nb[2] + k * tensor->nb[1] + l * tensor->nb[0]) = value; -} - -float ggml_tensor_get_f32(const 
ggml_tensor* tensor, int l, int k = 0, int j = 0, int i = 0) { - GGML_ASSERT(tensor->nb[0] == sizeof(float)); - return *(float*)((char*)(tensor->data) + i * tensor->nb[3] + j * tensor->nb[2] + k * tensor->nb[1] + l * tensor->nb[0]); -} - -void print_ggml_tensor(struct ggml_tensor* tensor, bool shape_only = false) { - printf("shape(%zu, %zu, %zu, %zu)\n", tensor->ne[0], tensor->ne[1], tensor->ne[2], tensor->ne[3]); - if (shape_only) { - return; - } - int range = 3; - for (int i = 0; i < tensor->ne[3]; i++) { - if (i >= range && i + range < tensor->ne[3]) { - continue; - } - for (int j = 0; j < tensor->ne[2]; j++) { - if (j >= range && j + range < tensor->ne[2]) { - continue; - } - for (int k = 0; k < tensor->ne[1]; k++) { - if (k >= range && k + range < tensor->ne[1]) { - continue; - } - for (int l = 0; l < tensor->ne[0]; l++) { - if (l >= range && l + range < tensor->ne[0]) { - continue; - } - printf(" [%d, %d, %d, %d] = %f\n", i, j, k, l, ggml_tensor_get_f32(tensor, l, k, j, i)); - } - } - } - } -} - -void copy_ggml_tensor( - struct ggml_tensor* dst, - const struct ggml_tensor* src) { - dst->nb[0] = src->nb[0]; - dst->nb[1] = src->nb[1]; - dst->nb[2] = src->nb[2]; - dst->nb[3] = src->nb[3]; - - memcpy(((char*)dst->data), ((char*)src->data), ggml_nbytes(dst)); -} - -// Ref: https://github.com/CompVis/stable-diffusion/blob/main/ldm/modules/diffusionmodules/util.py#L151 -void set_timestep_embedding(struct ggml_tensor* timesteps, struct ggml_tensor* embedding, int dim, int max_period = 10000) { - // timesteps: [N,] - // embedding: [(dim + 1)/2, N] - int half = dim / 2; - std::vector freqs(half); - for (int i = 0; i < half; ++i) { - freqs[i] = (float)std::exp(-std::log(max_period) * i / half); - } - for (int i = 0; i < timesteps->ne[0]; ++i) { - for (int j = 0; j < half; ++j) { - float arg = ggml_get_f32_1d(timesteps, i) * freqs[j]; - ggml_tensor_set_f32(embedding, std::cos(arg), j, i); - ggml_tensor_set_f32(embedding, std::sin(arg), j + half, i); - } - if (dim % 2 != 
0) { - *(float*)((char*)embedding->data + i * embedding->nb[1] + dim * embedding->nb[0]) = 0; - } - } -} - -struct ggml_tensor* new_timestep_embedding(struct ggml_context* ctx, struct ggml_tensor* timesteps, int dim, int max_period = 10000) { - // timesteps: [N,] - // embedding: [(dim + 1)/2, N] - int acutual_dim = dim; - if (dim % 2 != 0) { - acutual_dim = dim + 1; - } - struct ggml_tensor* embedding = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, acutual_dim, timesteps->ne[0]); - if (!ggml_get_no_alloc(ctx)) { - set_timestep_embedding(timesteps, embedding, dim, max_period); - } - return embedding; -} - -std::vector ggml_to_image_vec(struct ggml_tensor* t) { - int64_t w = t->ne[0]; - int64_t h = t->ne[1]; - int64_t c = t->ne[2]; - std::vector vec; - vec.resize(w * h * c); - uint8_t* data = (uint8_t*)vec.data(); - for (int i = 0; i < h; i++) { - for (int j = 0; j < w; j++) { - for (int k = 0; k < c; k++) { - float value = ggml_tensor_get_f32(t, j, i, k); - value = (value + 1.0f) * 0.5f; - if (value < 0) { - value = 0; - } else if (value > 1) { - value = 1; - } - value *= 255.f; - *(data + i * w * c + j * c + k) = (uint8_t)value; - } - } - } - return vec; -} - -void image_vec_to_ggml(const std::vector& vec, - struct ggml_tensor* t) { - int64_t w = t->ne[0]; - int64_t h = t->ne[1]; - int64_t c = t->ne[2]; - uint8_t* data = (uint8_t*)vec.data(); - for (int i = 0; i < h; i++) { - for (int j = 0; j < w; j++) { - for (int k = 0; k < c; k++) { - float value = *(data + i * w * c + j * c + k); - value = value / 255.f; - value = 2 * value - 1; - ggml_tensor_set_f32(t, value, j, i, k); - } - } - } -} - -/*================================================== CLIPTokenizer ===================================================*/ - -const std::string UNK_TOKEN = "<|endoftext|>"; -const std::string BOS_TOKEN = "<|startoftext|>"; -const std::string EOS_TOKEN = "<|endoftext|>"; -const std::string PAD_TOEKN = "<|endoftext|>"; - -const int UNK_TOKEN_ID = 49407; -const int BOS_TOKEN_ID = 49406; 
-const int EOS_TOKEN_ID = 49407; -const int PAD_TOKEN_ID = 49407; - -// Ref: https://github.com/openai/CLIP/blob/main/clip/simple_tokenizer.py -// TODO: implement bpe -class CLIPTokenizer { - private: - std::map encoder; - std::regex pat; - - static std::string strip(const std::string& str) { - std::string::size_type start = str.find_first_not_of(" \t\n\r\v\f"); - std::string::size_type end = str.find_last_not_of(" \t\n\r\v\f"); - - if (start == std::string::npos) { - // String contains only whitespace characters - return ""; - } - - return str.substr(start, end - start + 1); - } - - static std::string whitespace_clean(std::string text) { - text = std::regex_replace(text, std::regex(R"(\s+)"), " "); - text = strip(text); - return text; - } - - public: - CLIPTokenizer() = default; - std::string bpe(std::string token) { - std::string word = token + ""; - if (encoder.find(word) != encoder.end()) { - return word; - } else if (encoder.find(token) != encoder.end()) { - return token; - } - return UNK_TOKEN; - } - - void add_token(std::string token, int32_t token_id) { - encoder[token] = token_id; - } - - std::vector tokenize(std::string text, size_t max_length = 0, bool padding = false) { - std::vector tokens = encode(text); - tokens.insert(tokens.begin(), BOS_TOKEN_ID); - if (max_length > 0) { - if (tokens.size() > max_length - 1) { - tokens.resize(max_length - 1); - } else { - if (padding) { - tokens.insert(tokens.end(), max_length - 1 - tokens.size(), PAD_TOKEN_ID); - } - } - } - tokens.push_back(EOS_TOKEN_ID); - return tokens; - } - - std::vector encode(std::string text) { - std::string original_text = text; - std::vector bpe_tokens; - text = whitespace_clean(text); - std::transform(text.begin(), text.end(), text.begin(), [](unsigned char c) { return std::tolower(c); }); - - std::regex pat(R"(<\|startoftext\|>|<\|endoftext\|>|'s|'t|'re|'ve|'m|'ll|'d|[[:alpha:]]+|[[:digit:]]|[^[:space:][:alpha:][:digit:]]+)", - std::regex::icase); - - std::smatch matches; - std::string 
str = text; - std::vector token_strs; - while (std::regex_search(str, matches, pat)) { - for (auto& token : matches) { - std::istringstream iss(bpe(token)); - std::vector tokens{std::istream_iterator{iss}, - std::istream_iterator{}}; - for (const auto& bpe_token : tokens) { - bpe_tokens.push_back(encoder[bpe_token]); - token_strs.push_back(bpe_token); - } - } - str = matches.suffix(); - } - std::stringstream ss; - ss << "["; - for (auto token : token_strs) { - ss << "\"" << token << "\", "; - } - ss << "]"; - LOG_DEBUG("split prompt \"%s\" to tokens %s", original_text.c_str(), ss.str().c_str()); - return bpe_tokens; - } -}; - -// Ref: https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/cad87bf4e3e0b0a759afa94e933527c3123d59bc/modules/prompt_parser.py#L345 -// -// Parses a string with attention tokens and returns a list of pairs: text and its associated weight. -// Accepted tokens are: -// (abc) - increases attention to abc by a multiplier of 1.1 -// (abc:3.12) - increases attention to abc by a multiplier of 3.12 -// [abc] - decreases attention to abc by a multiplier of 1.1 -// \( - literal character '(' -// \[ - literal character '[' -// \) - literal character ')' -// \] - literal character ']' -// \\ - literal character '\' -// anything else - just text -// -// >>> parse_prompt_attention('normal text') -// [['normal text', 1.0]] -// >>> parse_prompt_attention('an (important) word') -// [['an ', 1.0], ['important', 1.1], [' word', 1.0]] -// >>> parse_prompt_attention('(unbalanced') -// [['unbalanced', 1.1]] -// >>> parse_prompt_attention('\(literal\]') -// [['(literal]', 1.0]] -// >>> parse_prompt_attention('(unnecessary)(parens)') -// [['unnecessaryparens', 1.1]] -// >>> parse_prompt_attention('a (((house:1.3)) [on] a (hill:0.5), sun, (((sky))).') -// [['a ', 1.0], -// ['house', 1.5730000000000004], -// [' ', 1.1], -// ['on', 1.0], -// [' a ', 1.1], -// ['hill', 0.55], -// [', sun, ', 1.1], -// ['sky', 1.4641000000000006], -// ['.', 1.1]] -std::vector> 
parse_prompt_attention(const std::string& text) { - std::vector> res; - std::vector round_brackets; - std::vector square_brackets; - - float round_bracket_multiplier = 1.1f; - float square_bracket_multiplier = 1 / 1.1f; - - std::regex re_attention(R"(\\\(|\\\)|\\\[|\\\]|\\\\|\\|\(|\[|:([+-]?[.\d]+)\)|\)|\]|[^\\()\[\]:]+|:)"); - std::regex re_break(R"(\s*\bBREAK\b\s*)"); - - auto multiply_range = [&](int start_position, float multiplier) { - for (int p = start_position; p < res.size(); ++p) { - res[p].second *= multiplier; - } - }; - - std::smatch m; - std::string remaining_text = text; - - while (std::regex_search(remaining_text, m, re_attention)) { - std::string text = m[0]; - std::string weight = m[1]; - - if (text == "(") { - round_brackets.push_back(res.size()); - } else if (text == "[") { - square_brackets.push_back(res.size()); - } else if (!weight.empty()) { - if (!round_brackets.empty()) { - multiply_range(round_brackets.back(), std::stod(weight)); - round_brackets.pop_back(); - } - } else if (text == ")" && !round_brackets.empty()) { - multiply_range(round_brackets.back(), round_bracket_multiplier); - round_brackets.pop_back(); - } else if (text == "]" && !square_brackets.empty()) { - multiply_range(square_brackets.back(), square_bracket_multiplier); - square_brackets.pop_back(); - } else if (text == "\\(") { - res.push_back({text.substr(1), 1.0f}); - } else { - res.push_back({text, 1.0f}); - } - - remaining_text = m.suffix(); - } - - for (int pos : round_brackets) { - multiply_range(pos, round_bracket_multiplier); - } - - for (int pos : square_brackets) { - multiply_range(pos, square_bracket_multiplier); - } - - if (res.empty()) { - res.push_back({"", 1.0f}); - } - - int i = 0; - while (i + 1 < res.size()) { - if (res[i].second == res[i + 1].second) { - res[i].first += res[i + 1].first; - res.erase(res.begin() + i + 1); - } else { - ++i; - } - } - - return res; -} - -/*================================================ FrozenCLIPEmbedder 
================================================*/ - -struct ResidualAttentionBlock { - int32_t n_head; - int32_t d_model; - int32_t hidden_size; // n_head * d_model - int32_t intermediate_size; - - // attention - struct ggml_tensor* q_w; // [hidden_size, hidden_size] - struct ggml_tensor* q_b; // [hidden_size, ] - struct ggml_tensor* k_w; // [hidden_size, hidden_size] - struct ggml_tensor* k_b; // [hidden_size, ] - struct ggml_tensor* v_w; // [hidden_size, hidden_size] - struct ggml_tensor* v_b; // [hidden_size, ] - - struct ggml_tensor* out_w; // [hidden_size, hidden_size] - struct ggml_tensor* out_b; // [hidden_size, ] - - // layer norm 1 - struct ggml_tensor* ln1_w; // [hidden_size, ] - struct ggml_tensor* ln1_b; // [hidden_size, ] - - // mlp - struct ggml_tensor* fc1_w; // [intermediate_size, hidden_size] - struct ggml_tensor* fc1_b; // [intermediate_size, ] - - struct ggml_tensor* fc2_w; // [hidden_size, intermediate_size] - struct ggml_tensor* fc2_b; // [hidden_size, ] - - // layer norm 2 - struct ggml_tensor* ln2_w; // [hidden_size, ] - struct ggml_tensor* ln2_b; // [hidden_size, ] - - size_t compute_params_mem_size(ggml_type wtype) { - double mem_size = 0; - mem_size += 4 * hidden_size * hidden_size * ggml_type_sizef(wtype); // q_w/k_w/v_w/out_w - mem_size += 8 * hidden_size * ggml_type_sizef(GGML_TYPE_F32); // q_b/k_b/v_b/out_b/ln1_w/ln1_b/ln2_w/ln2_b - mem_size += 2 * hidden_size * intermediate_size * ggml_type_sizef(wtype); // fc1_w/fc2_w - mem_size += intermediate_size * ggml_type_sizef(GGML_TYPE_F32); // fc1_b - mem_size += hidden_size * ggml_type_sizef(GGML_TYPE_F32); // fc2_b - mem_size += 16 * ggml_tensor_overhead(); // tensor overhead - return static_cast(mem_size); - } - - void init_params(struct ggml_context* ctx, ggml_type wtype) { - ln1_w = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, hidden_size); - ln1_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, hidden_size); - - q_w = ggml_new_tensor_2d(ctx, wtype, hidden_size, hidden_size); - q_b = 
ggml_new_tensor_1d(ctx, GGML_TYPE_F32, hidden_size); - k_w = ggml_new_tensor_2d(ctx, wtype, hidden_size, hidden_size); - k_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, hidden_size); - v_w = ggml_new_tensor_2d(ctx, wtype, hidden_size, hidden_size); - v_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, hidden_size); - - out_w = ggml_new_tensor_2d(ctx, wtype, hidden_size, hidden_size); - out_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, hidden_size); - - fc1_w = ggml_new_tensor_2d(ctx, wtype, hidden_size, intermediate_size); - fc1_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, intermediate_size); - - fc2_w = ggml_new_tensor_2d(ctx, wtype, intermediate_size, hidden_size); - fc2_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, hidden_size); - - ln2_w = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, hidden_size); - ln2_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, hidden_size); - } - - void map_by_name(std::map& tensors, const std::string prefix) { - tensors[prefix + "self_attn.q_proj.weight"] = q_w; - tensors[prefix + "self_attn.q_proj.bias"] = q_b; - tensors[prefix + "self_attn.k_proj.weight"] = k_w; - tensors[prefix + "self_attn.k_proj.bias"] = k_b; - tensors[prefix + "self_attn.v_proj.weight"] = v_w; - tensors[prefix + "self_attn.v_proj.bias"] = v_b; - tensors[prefix + "self_attn.out_proj.weight"] = out_w; - tensors[prefix + "self_attn.out_proj.bias"] = out_b; - - tensors[prefix + "layer_norm1.weight"] = ln1_w; - tensors[prefix + "layer_norm1.bias"] = ln1_b; - - tensors[prefix + "layer_norm2.weight"] = ln2_w; - tensors[prefix + "layer_norm2.bias"] = ln2_b; - - tensors[prefix + "mlp.fc1.weight"] = fc1_w; - tensors[prefix + "mlp.fc1.bias"] = fc1_b; - - tensors[prefix + "mlp.fc2.weight"] = fc2_w; - tensors[prefix + "mlp.fc2.bias"] = fc2_b; - } - - struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { - // x: [N, n_token, hidden_size] - int64_t N = x->ne[2]; - int64_t n_token = x->ne[1]; - int64_t hidden_size = n_head * d_model; - - struct ggml_tensor* r = x; - - // layer norm 1 
- { - x = ggml_norm(ctx, x); - x = ggml_add(ctx, - ggml_mul(ctx, ggml_repeat(ctx, ln1_w, x), x), - ggml_repeat(ctx, ln1_b, x)); - } - // self-attention - { - struct ggml_tensor* q = ggml_add(ctx, - ggml_repeat(ctx, q_b, x), - ggml_mul_mat(ctx, q_w, x)); - q = ggml_scale_inplace(ctx, q, ggml_new_f32(ctx, 1.0f / sqrt((float)d_model))); - q = ggml_reshape_4d(ctx, q, d_model, n_head, n_token, N); // [N, n_token, n_head, d_model] - q = ggml_cont(ctx, ggml_permute(ctx, q, 0, 2, 1, 3)); // [N, n_head, n_token, d_model] - q = ggml_reshape_3d(ctx, q, d_model, n_token, n_head * N); // [N * n_head, n_token, d_model] - - struct ggml_tensor* k = ggml_add(ctx, - ggml_repeat(ctx, k_b, x), - ggml_mul_mat(ctx, k_w, x)); - k = ggml_reshape_4d(ctx, k, d_model, n_head, n_token, N); // [N, n_token, n_head, d_model] - k = ggml_cont(ctx, ggml_permute(ctx, k, 0, 2, 1, 3)); // [N, n_head, n_token, d_model] - k = ggml_reshape_3d(ctx, k, d_model, n_token, n_head); // [N * n_head, n_token, d_model] - - struct ggml_tensor* v = ggml_add(ctx, - ggml_repeat(ctx, v_b, x), - ggml_mul_mat(ctx, v_w, x)); - v = ggml_reshape_4d(ctx, v, d_model, n_head, n_token, N); // [N, n_token, n_head, d_model] - v = ggml_cont(ctx, ggml_permute(ctx, v, 1, 2, 0, 3)); // [N, n_head, d_model, n_token] - v = ggml_reshape_3d(ctx, v, n_token, d_model, n_head * N); // [N * n_head, d_model, n_token] - - struct ggml_tensor* kq = ggml_mul_mat(ctx, k, q); // [N * n_head, n_token, n_token] - - kq = ggml_diag_mask_inf_inplace(ctx, kq, 0); - kq = ggml_soft_max_inplace(ctx, kq); - - struct ggml_tensor* kqv = ggml_mul_mat(ctx, v, kq); // [N * n_head, n_token, d_model] - kqv = ggml_reshape_4d(ctx, kqv, d_model, n_token, n_head, N); - kqv = ggml_cont(ctx, ggml_permute(ctx, kqv, 0, 2, 1, 3)); // [N, n_token, n_head, d_model] - - x = ggml_reshape_2d(ctx, kqv, d_model * n_head, n_token * N); // // [N * n_token, d_model * n_head] - } - - // attention output - x = ggml_add(ctx, ggml_repeat(ctx, out_b, x), ggml_mul_mat(ctx, out_w, x)); - - 
// residual - x = ggml_add(ctx, x, r); - r = x; - - // layer norm 2 - { - x = ggml_norm(ctx, x); - - x = ggml_add(ctx, ggml_mul(ctx, ggml_repeat(ctx, ln2_w, x), x), - ggml_repeat(ctx, ln2_b, x)); - } - - // mlp - x = ggml_mul_mat(ctx, fc1_w, x); - x = ggml_add(ctx, ggml_repeat(ctx, fc1_b, x), x); - - x = ggml_gelu_quick_inplace(ctx, x); - - x = ggml_mul_mat(ctx, fc2_w, x); - x = ggml_add(ctx, ggml_repeat(ctx, fc2_b, x), x); - - // residual 2 - x = ggml_add(ctx, x, r); - - return x; - } -}; - -struct CLIPTextModel { - // network hparams - int32_t vocab_size = 49408; - int32_t max_position_embeddings = 77; - int32_t hidden_size = 768; - int32_t intermediate_size = 3072; - int32_t projection_dim = 768; - int32_t n_head = 12; // num_attention_heads - int32_t num_hidden_layers = 12; - - // embeddings - struct ggml_tensor* position_ids; - struct ggml_tensor* token_embed_weight; - struct ggml_tensor* position_embed_weight; - // transformer - ResidualAttentionBlock resblocks[12]; - struct ggml_tensor* final_ln_w; - struct ggml_tensor* final_ln_b; - - CLIPTextModel() { - int d_model = hidden_size / n_head; // 64 - for (int i = 0; i < num_hidden_layers; i++) { - resblocks[i].d_model = d_model; - resblocks[i].n_head = n_head; - resblocks[i].hidden_size = hidden_size; - resblocks[i].intermediate_size = intermediate_size; - } - } - - size_t compute_params_mem_size(ggml_type wtype) { - double mem_size = 0; - mem_size += hidden_size * max_position_embeddings * ggml_type_sizef(GGML_TYPE_I32); // position_ids - mem_size += hidden_size * vocab_size * ggml_type_sizef(wtype); // token_embed_weight - mem_size += hidden_size * max_position_embeddings * ggml_type_sizef(wtype); // position_embed_weight - for (int i = 0; i < num_hidden_layers; i++) { - mem_size += resblocks[i].compute_params_mem_size(wtype); - } - mem_size += 2 * hidden_size * ggml_type_sizef(GGML_TYPE_F32); // final_ln_w/b - mem_size += ggml_tensor_overhead(); // object overhead - return static_cast(mem_size); - } - - 
void init_params(struct ggml_context* ctx, ggml_type wtype) { - position_ids = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, max_position_embeddings); - for (int i = 0; i < max_position_embeddings; i++) { - ggml_set_i32_1d(position_ids, i, i); - } - token_embed_weight = ggml_new_tensor_2d(ctx, wtype, hidden_size, vocab_size); - position_embed_weight = ggml_new_tensor_2d(ctx, wtype, hidden_size, max_position_embeddings); - - for (int i = 0; i < num_hidden_layers; i++) { - resblocks[i].init_params(ctx, wtype); - } - - final_ln_w = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, hidden_size); - final_ln_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, hidden_size); - } - - void map_by_name(std::map& tensors, const std::string prefix) { - tensors[prefix + "embeddings.token_embedding.weight"] = token_embed_weight; - tensors[prefix + "embeddings.position_embedding.weight"] = position_embed_weight; - tensors[prefix + "final_layer_norm.weight"] = final_ln_w; - tensors[prefix + "final_layer_norm.bias"] = final_ln_b; - for (int i = 0; i < num_hidden_layers; i++) { - resblocks[i].map_by_name(tensors, prefix + "encoder.layers." 
+ std::to_string(i) + "."); - } - } - - struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* input_ids) { - // input_ids: [N, n_token] - GGML_ASSERT(input_ids->ne[0] <= position_ids->ne[0]); - - // token_embedding + position_embedding - struct ggml_tensor* x; - x = ggml_add(ctx, - ggml_get_rows(ctx, token_embed_weight, input_ids), - ggml_get_rows(ctx, - position_embed_weight, - ggml_view_1d(ctx, position_ids, input_ids->ne[0], 0))); // [N, n_token, hidden_size] - - // transformer - for (int i = 0; i < num_hidden_layers; i++) { - x = resblocks[i].forward(ctx, x); // [N, n_token, hidden_size] - } - - // final layer norm - { - x = ggml_norm(ctx, x); - - x = ggml_add(ctx, ggml_mul(ctx, ggml_repeat(ctx, final_ln_w, x), x), - ggml_repeat(ctx, final_ln_b, x)); - } - - return x; // [N, n_token, hidden_size] - } -}; - -// ldm.modules.encoders.modules.FrozenCLIPEmbedder -struct FrozenCLIPEmbedder { - CLIPTokenizer tokenizer; - CLIPTextModel text_model; - struct ggml_tensor* forward(struct ggml_context* ctx, const std::string& prompt) { - std::vector tokens = tokenizer.tokenize(prompt, text_model.max_position_embeddings, true); - struct ggml_tensor* input_ids = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, tokens.size()); - memcpy(input_ids->data, tokens.data(), tokens.size() * ggml_element_size(input_ids)); - struct ggml_tensor* hidden_states = text_model.forward(ctx, input_ids); - return hidden_states; - } -}; - -// Ref: https://github.com/AUTOMATIC1111/stable-diffusion-webui/blob/cad87bf4e3e0b0a759afa94e933527c3123d59bc/modules/sd_hijack_clip.py#L283 -struct FrozenCLIPEmbedderWithCustomWords { - CLIPTokenizer tokenizer; - CLIPTextModel text_model; - - std::pair, std::vector> tokenize(std::string text, - size_t max_length = 0, - bool padding = false) { - auto parsed_attention = parse_prompt_attention(text); - - { - std::stringstream ss; - ss << "["; - for (const auto& item : parsed_attention) { - ss << "['" << item.first << "', " << item.second << "], "; - } - 
ss << "]"; - LOG_DEBUG("parse '%s' to %s", text.c_str(), ss.str().c_str()); - } - - std::vector tokens; - std::vector weights; - for (const auto& item : parsed_attention) { - const std::string& curr_text = item.first; - float curr_weight = item.second; - std::vector curr_tokens = tokenizer.encode(curr_text); - tokens.insert(tokens.end(), curr_tokens.begin(), curr_tokens.end()); - weights.insert(weights.end(), curr_tokens.size(), curr_weight); - } - tokens.insert(tokens.begin(), BOS_TOKEN_ID); - weights.insert(weights.begin(), 1.0); - - if (max_length > 0) { - if (tokens.size() > max_length - 1) { - tokens.resize(max_length - 1); - weights.resize(max_length - 1); - } else { - if (padding) { - tokens.insert(tokens.end(), max_length - 1 - tokens.size(), PAD_TOKEN_ID); - weights.insert(weights.end(), max_length - 1 - weights.size(), 1.0); - } - } - } - tokens.push_back(EOS_TOKEN_ID); - weights.push_back(1.0); - - // for (int i = 0; i < tokens.size(); i++) { - // std::cout << tokens[i] << ":" << weights[i] << ", "; - // } - // std::cout << std::endl; - - return {tokens, weights}; - } -}; - -/*==================================================== UnetModel =====================================================*/ - -struct ResBlock { - // network hparams - int channels; // model_channels * (1, 1, 1, 2, 2, 4, 4, 4) - int emb_channels; // time_embed_dim - int out_channels; // mult * model_channels - - // network params - // in_layers - struct ggml_tensor* in_layer_0_w; // [channels, ] - struct ggml_tensor* in_layer_0_b; // [channels, ] - // in_layer_1 is nn.SILU() - struct ggml_tensor* in_layer_2_w; // [out_channels, channels, 3, 3] - struct ggml_tensor* in_layer_2_b; // [out_channels, ] - - // emb_layers - // emb_layer_0 is nn.SILU() - struct ggml_tensor* emb_layer_1_w; // [out_channels, emb_channels] - struct ggml_tensor* emb_layer_1_b; // [out_channels, ] - - // out_layers - struct ggml_tensor* out_layer_0_w; // [out_channels, ] - struct ggml_tensor* out_layer_0_b; // 
[out_channels, ] - // out_layer_1 is nn.SILU() - // out_layer_2 is nn.Dropout(), p = 0 for inference - struct ggml_tensor* out_layer_3_w; // [out_channels, out_channels, 3, 3] - struct ggml_tensor* out_layer_3_b; // [out_channels, ] - - // skip connection, only if out_channels != channels - struct ggml_tensor* skip_w; // [out_channels, channels, 1, 1] - struct ggml_tensor* skip_b; // [out_channels, ] - - size_t compute_params_mem_size(ggml_type wtype) { - double mem_size = 0; - mem_size += 2 * channels * ggml_type_sizef(GGML_TYPE_F32); // in_layer_0_w/b - mem_size += out_channels * channels * 3 * 3 * ggml_type_sizef(GGML_TYPE_F16); // in_layer_2_w - mem_size += 5 * out_channels * ggml_type_sizef(GGML_TYPE_F32); // in_layer_2_b/emb_layer_1_b/out_layer_0_w/out_layer_0_b/out_layer_3_b - mem_size += out_channels * emb_channels * ggml_type_sizef(wtype); // emb_layer_1_w - mem_size += out_channels * out_channels * 3 * 3 * ggml_type_sizef(GGML_TYPE_F16); // out_layer_3_w - - mem_size += 10 * ggml_tensor_overhead(); // object overhead - - if (out_channels != channels) { - mem_size += out_channels * channels * 1 * 1 * ggml_type_sizef(GGML_TYPE_F16); // skip_w - mem_size += out_channels * ggml_type_sizef(GGML_TYPE_F32); // skip_b - - mem_size += 2 * ggml_tensor_overhead(); // object overhead - } - return static_cast(mem_size); - } - - void init_params(struct ggml_context* ctx, ggml_type wtype) { - in_layer_0_w = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, channels); - in_layer_0_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, channels); - in_layer_2_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 3, 3, channels, out_channels); - in_layer_2_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, out_channels); - - emb_layer_1_w = ggml_new_tensor_2d(ctx, wtype, emb_channels, out_channels); - emb_layer_1_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, out_channels); - - out_layer_0_w = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, out_channels); - out_layer_0_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, out_channels); 
- out_layer_3_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 3, 3, out_channels, out_channels); - out_layer_3_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, out_channels); - - if (out_channels != channels) { - skip_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 1, 1, channels, out_channels); - skip_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, out_channels); - } - } - - void map_by_name(std::map& tensors, const std::string prefix) { - tensors[prefix + "in_layers.0.weight"] = in_layer_0_w; - tensors[prefix + "in_layers.0.bias"] = in_layer_0_b; - tensors[prefix + "in_layers.2.weight"] = in_layer_2_w; - tensors[prefix + "in_layers.2.bias"] = in_layer_2_b; - - tensors[prefix + "emb_layers.1.weight"] = emb_layer_1_w; - tensors[prefix + "emb_layers.1.bias"] = emb_layer_1_b; - - tensors[prefix + "out_layers.0.weight"] = out_layer_0_w; - tensors[prefix + "out_layers.0.bias"] = out_layer_0_b; - tensors[prefix + "out_layers.3.weight"] = out_layer_3_w; - tensors[prefix + "out_layers.3.bias"] = out_layer_3_b; - - if (out_channels != channels) { - tensors[prefix + "skip_connection.weight"] = skip_w; - tensors[prefix + "skip_connection.bias"] = skip_b; - } - } - - struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x, struct ggml_tensor* emb) { - // x: [N, channels, h, w] - // emb: [N, emb_channels] - - // in_layers - // group norm 32 - auto h = ggml_group_norm(ctx, x); - h = ggml_add(ctx, - ggml_mul(ctx, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, in_layer_0_w, 1, 1, in_layer_0_w->ne[0], 1), - h), - h), - ggml_repeat(ctx, - ggml_reshape_4d(ctx, in_layer_0_b, 1, 1, in_layer_0_b->ne[0], 1), - h)); - // silu - h = ggml_silu_inplace(ctx, h); - // conv2d - h = ggml_conv_2d(ctx, in_layer_2_w, h, 1, 1, 1, 1, 1, 1); - h = ggml_add(ctx, - h, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, in_layer_2_b, 1, 1, in_layer_2_b->ne[0], 1), - h)); // [N, out_channels, h, w] - - // emb_layers - auto emb_out = ggml_silu(ctx, emb); - emb_out = ggml_mul_mat(ctx, emb_layer_1_w, emb_out); - emb_out = 
ggml_add(ctx, ggml_repeat(ctx, emb_layer_1_b, emb_out), emb_out); // [N, out_channels] - emb_out = ggml_reshape_4d(ctx, emb_out, 1, 1, emb_out->ne[0], emb_out->ne[1]); // [N, out_channels, 1, 1] - emb_out = ggml_repeat(ctx, emb_out, h); // [N, out_channels, h, w] - - // out_layers - h = ggml_add(ctx, h, emb_out); - // group norm 32 - h = ggml_group_norm_inplace(ctx, h); - h = ggml_add(ctx, - ggml_mul(ctx, ggml_repeat(ctx, ggml_reshape_4d(ctx, out_layer_0_w, 1, 1, out_layer_0_w->ne[0], 1), h), h), - ggml_repeat(ctx, ggml_reshape_4d(ctx, out_layer_0_b, 1, 1, out_layer_0_b->ne[0], 1), h)); - // silu - h = ggml_silu_inplace(ctx, h); - // dropout, skip for inference - // conv2d - h = ggml_conv_2d(ctx, out_layer_3_w, h, 1, 1, 1, 1, 1, 1); - h = ggml_add(ctx, - h, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, out_layer_3_b, 1, 1, out_layer_3_b->ne[0], 1), - h)); // [N, out_channels, h, w - - // skip connection - if (out_channels != channels) { - x = ggml_conv_2d(ctx, skip_w, x, 1, 1, 0, 0, 1, 1); - x = ggml_add(ctx, - x, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, skip_b, 1, 1, skip_b->ne[0], 1), - x)); // [N, out_channels, h, w] - } - h = ggml_add(ctx, h, x); - return h; // [N, out_channels, h, w] - } -}; - -struct SpatialTransformer { - int in_channels; // mult * model_channels - int n_head; // num_heads - int d_head; // in_channels // n_heads - int depth = 1; // 1 - int context_dim = 768; // hidden_size - - // group norm - struct ggml_tensor* norm_w; // [in_channels,] - struct ggml_tensor* norm_b; // [in_channels,] - - // proj_in - struct ggml_tensor* proj_in_w; // [in_channels, in_channels, 1, 1] - struct ggml_tensor* proj_in_b; // [in_channels,] - - // transformer - struct - { - // layer norm 1 - struct ggml_tensor* norm1_w; // [in_channels, ] - struct ggml_tensor* norm1_b; // [in_channels, ] - - // attn1 - struct ggml_tensor* attn1_q_w; // [in_channels, in_channels] - struct ggml_tensor* attn1_k_w; // [in_channels, in_channels] - struct ggml_tensor* attn1_v_w; // 
[in_channels, in_channels] - - struct ggml_tensor* attn1_out_w; // [in_channels, in_channels] - struct ggml_tensor* attn1_out_b; // [in_channels, ] - - // layer norm 2 - struct ggml_tensor* norm2_w; // [in_channels, ] - struct ggml_tensor* norm2_b; // [in_channels, ] - - // attn2 - struct ggml_tensor* attn2_q_w; // [in_channels, in_channels] - struct ggml_tensor* attn2_k_w; // [in_channels, context_dim] - struct ggml_tensor* attn2_v_w; // [in_channels, context_dim] - - struct ggml_tensor* attn2_out_w; // [in_channels, in_channels] - struct ggml_tensor* attn2_out_b; // [in_channels, ] - - // layer norm 3 - struct ggml_tensor* norm3_w; // [in_channels, ] - struct ggml_tensor* norm3_b; // [in_channels, ] - - // ff - struct ggml_tensor* ff_0_proj_w; // [in_channels * 4 * 2, in_channels] - struct ggml_tensor* ff_0_proj_b; // [in_channels * 4 * 2] - - struct ggml_tensor* ff_2_w; // [in_channels, in_channels * 4] - struct ggml_tensor* ff_2_b; // [in_channels,] - } transformer; - - // proj_out - struct ggml_tensor* proj_out_w; // [in_channels, in_channels, 1, 1] - struct ggml_tensor* proj_out_b; // [in_channels,] - - size_t compute_params_mem_size(ggml_type wtype) { - double mem_size = 0; - mem_size += 2 * in_channels * ggml_type_sizef(GGML_TYPE_F32); // norm_w/norm_b - mem_size += 2 * in_channels * in_channels * 1 * 1 * ggml_type_sizef(GGML_TYPE_F16); // proj_in_w/proj_out_w - mem_size += 2 * in_channels * ggml_type_sizef(GGML_TYPE_F32); // proj_in_b/proj_out_b - - // transformer - { - mem_size += 6 * in_channels * ggml_type_sizef(GGML_TYPE_F32); // norm1-3_w/b - mem_size += 6 * in_channels * in_channels * ggml_type_sizef(wtype); // attn1_q/k/v/out_w attn2_q/out_w - mem_size += 2 * in_channels * context_dim * ggml_type_sizef(wtype); // attn2_k/v_w - mem_size += in_channels * 4 * 2 * in_channels * ggml_type_sizef(wtype); // ff_0_proj_w - mem_size += in_channels * 4 * 2 * ggml_type_sizef(GGML_TYPE_F32); // ff_0_proj_b - mem_size += in_channels * 4 * in_channels * 
ggml_type_sizef(wtype); // ff_2_w - mem_size += in_channels * ggml_type_sizef(GGML_TYPE_F32); // ff_2_b - } - mem_size += 26 * ggml_tensor_overhead(); // object overhead - return static_cast(mem_size); - } - - void init_params(struct ggml_context* ctx, ggml_type wtype) { - norm_w = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - norm_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - proj_in_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 1, 1, in_channels, in_channels); - proj_in_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - - proj_out_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 1, 1, in_channels, in_channels); - proj_out_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - - // transformer - transformer.norm1_w = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - transformer.norm1_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - - transformer.attn1_q_w = ggml_new_tensor_2d(ctx, wtype, in_channels, in_channels); - transformer.attn1_k_w = ggml_new_tensor_2d(ctx, wtype, in_channels, in_channels); - transformer.attn1_v_w = ggml_new_tensor_2d(ctx, wtype, in_channels, in_channels); - - transformer.attn1_out_w = ggml_new_tensor_2d(ctx, wtype, in_channels, in_channels); - transformer.attn1_out_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - - transformer.norm2_w = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - transformer.norm2_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - - transformer.attn2_q_w = ggml_new_tensor_2d(ctx, wtype, in_channels, in_channels); - transformer.attn2_k_w = ggml_new_tensor_2d(ctx, wtype, context_dim, in_channels); - transformer.attn2_v_w = ggml_new_tensor_2d(ctx, wtype, context_dim, in_channels); - - transformer.attn2_out_w = ggml_new_tensor_2d(ctx, wtype, in_channels, in_channels); - transformer.attn2_out_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - - transformer.norm3_w = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - transformer.norm3_b = 
ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - - transformer.ff_0_proj_w = ggml_new_tensor_2d(ctx, wtype, in_channels, in_channels * 4 * 2); - transformer.ff_0_proj_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels * 4 * 2); - - transformer.ff_2_w = ggml_new_tensor_2d(ctx, wtype, in_channels * 4, in_channels); - transformer.ff_2_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - } - - void map_by_name(std::map& tensors, const std::string prefix) { - tensors[prefix + "norm.weight"] = norm_w; - tensors[prefix + "norm.bias"] = norm_b; - tensors[prefix + "proj_in.weight"] = proj_in_w; - tensors[prefix + "proj_in.bias"] = proj_in_b; - - // transformer - { - std::string transformer_prefix = prefix + "transformer_blocks.0."; - tensors[transformer_prefix + "attn1.to_q.weight"] = transformer.attn1_q_w; - tensors[transformer_prefix + "attn1.to_k.weight"] = transformer.attn1_k_w; - tensors[transformer_prefix + "attn1.to_v.weight"] = transformer.attn1_v_w; - - tensors[transformer_prefix + "attn1.to_out.0.weight"] = transformer.attn1_out_w; - tensors[transformer_prefix + "attn1.to_out.0.bias"] = transformer.attn1_out_b; - - tensors[transformer_prefix + "ff.net.0.proj.weight"] = transformer.ff_0_proj_w; - tensors[transformer_prefix + "ff.net.0.proj.bias"] = transformer.ff_0_proj_b; - tensors[transformer_prefix + "ff.net.2.weight"] = transformer.ff_2_w; - tensors[transformer_prefix + "ff.net.2.bias"] = transformer.ff_2_b; - - tensors[transformer_prefix + "attn2.to_q.weight"] = transformer.attn2_q_w; - tensors[transformer_prefix + "attn2.to_k.weight"] = transformer.attn2_k_w; - tensors[transformer_prefix + "attn2.to_v.weight"] = transformer.attn2_v_w; - - tensors[transformer_prefix + "attn2.to_out.0.weight"] = transformer.attn2_out_w; - tensors[transformer_prefix + "attn2.to_out.0.bias"] = transformer.attn2_out_b; - - tensors[transformer_prefix + "norm1.weight"] = transformer.norm1_w; - tensors[transformer_prefix + "norm1.bias"] = transformer.norm1_b; - 
tensors[transformer_prefix + "norm2.weight"] = transformer.norm2_w; - tensors[transformer_prefix + "norm2.bias"] = transformer.norm2_b; - tensors[transformer_prefix + "norm3.weight"] = transformer.norm3_w; - tensors[transformer_prefix + "norm3.bias"] = transformer.norm3_b; - } - - tensors[prefix + "proj_out.weight"] = proj_out_w; - tensors[prefix + "proj_out.bias"] = proj_out_b; - } - - struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x, struct ggml_tensor* context) { - // x: [N, in_channels, h, w] - // context: [N, max_position, hidden_size(aka context_dim)] - - auto x_in = x; - // group norm 32 - x = ggml_group_norm(ctx, x); - x = ggml_add(ctx, - ggml_mul(ctx, ggml_repeat(ctx, ggml_reshape_4d(ctx, norm_w, 1, 1, norm_w->ne[0], 1), x), x), - ggml_repeat(ctx, ggml_reshape_4d(ctx, norm_b, 1, 1, norm_b->ne[0], 1), x)); - // proj_in - x = ggml_conv_2d(ctx, proj_in_w, x, 1, 1, 0, 0, 1, 1); - x = ggml_add(ctx, - x, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, proj_in_b, 1, 1, proj_in_b->ne[0], 1), - x)); // [N, in_channels, h, w] - - // transformer - const int64_t n = x->ne[3]; - const int64_t c = x->ne[2]; - const int64_t h = x->ne[1]; - const int64_t w = x->ne[0]; - const int64_t max_position = context->ne[1]; - x = ggml_cont(ctx, ggml_permute(ctx, x, 1, 2, 0, 3)); // [N, h, w, in_channels] - - { - auto r = x; - // layer norm 1 - { - x = ggml_reshape_2d(ctx, x, c, w * h * n); - x = ggml_norm(ctx, x); - x = ggml_add(ctx, - ggml_mul(ctx, - ggml_repeat(ctx, transformer.norm1_w, x), - x), - ggml_repeat(ctx, transformer.norm1_b, x)); - } - - // self-attention - { - x = ggml_reshape_2d(ctx, x, c, h * w * n); // [N * h * w, in_channels] - struct ggml_tensor* q = ggml_mul_mat(ctx, transformer.attn1_q_w, x); // [N * h * w, in_channels] - q = ggml_scale_inplace(ctx, q, ggml_new_f32(ctx, 1.0f / sqrt((float)d_head))); - q = ggml_reshape_4d(ctx, q, d_head, n_head, h * w, n); // [N, h * w, n_head, d_head] - q = ggml_cont(ctx, ggml_permute(ctx, q, 0, 2, 1, 3)); 
// [N, n_head, h * w, d_head] - q = ggml_reshape_3d(ctx, q, d_head, h * w, n_head * n); // [N * n_head, h * w, d_head] - - struct ggml_tensor* k = ggml_mul_mat(ctx, transformer.attn1_k_w, x); // [N * h * w, in_channels] - k = ggml_reshape_4d(ctx, k, d_head, n_head, h * w, n); // [N, h * w, n_head, d_head] - k = ggml_cont(ctx, ggml_permute(ctx, k, 0, 2, 1, 3)); // [N, n_head, h * w, d_head] - k = ggml_reshape_3d(ctx, k, d_head, h * w, n_head * n); // [N * n_head, h * w, d_head] - - struct ggml_tensor* v = ggml_mul_mat(ctx, transformer.attn1_v_w, x); // [N * h * w, in_channels] - v = ggml_reshape_4d(ctx, v, d_head, n_head, h * w, n); // [N, h * w, n_head, d_head] - v = ggml_cont(ctx, ggml_permute(ctx, v, 1, 2, 0, 3)); // [N, n_head, d_head, h * w] - v = ggml_reshape_3d(ctx, v, h * w, d_head, n_head * n); // [N * n_head, d_head, h * w] - - struct ggml_tensor* kq = ggml_mul_mat(ctx, k, q); // [N * n_head, h * w, h * w] - // kq = ggml_diag_mask_inf_inplace(ctx, kq, 0); - kq = ggml_soft_max_inplace(ctx, kq); - - struct ggml_tensor* kqv = ggml_mul_mat(ctx, v, kq); // [N * n_head, h * w, d_head] - kqv = ggml_reshape_4d(ctx, kqv, d_head, h * w, n_head, n); - kqv = ggml_cont(ctx, ggml_permute(ctx, kqv, 0, 2, 1, 3)); // [N, h * w, n_head, d_head] - - // x = ggml_cpy(ctx, kqv, ggml_new_tensor_2d(ctx, GGML_TYPE_F32, d_head * n_head, h * w * n)); - x = ggml_reshape_2d(ctx, kqv, d_head * n_head, h * w * n); - - x = ggml_add(ctx, ggml_repeat(ctx, transformer.attn1_out_b, x), ggml_mul_mat(ctx, transformer.attn1_out_w, x)); - - x = ggml_reshape_4d(ctx, x, c, w, h, n); - } - - x = ggml_add(ctx, x, r); - r = x; - - // layer norm 2 - { - x = ggml_norm(ctx, x); - x = ggml_add(ctx, - ggml_mul(ctx, - ggml_repeat(ctx, transformer.norm2_w, x), x), - ggml_repeat(ctx, transformer.norm2_b, x)); - } - - // cross-attention - { - x = ggml_reshape_2d(ctx, x, c, h * w * n); // [N * h * w, in_channels] - context = ggml_reshape_2d(ctx, context, context->ne[0], context->ne[1] * context->ne[2]); // [N 
* max_position, hidden_size] - struct ggml_tensor* q = ggml_mul_mat(ctx, transformer.attn2_q_w, x); // [N * h * w, in_channels] - - q = ggml_scale_inplace(ctx, q, ggml_new_f32(ctx, 1.0f / sqrt((float)d_head))); - q = ggml_reshape_4d(ctx, q, d_head, n_head, h * w, n); // [N, h * w, n_head, d_head] - q = ggml_cont(ctx, ggml_permute(ctx, q, 0, 2, 1, 3)); // [N, n_head, h * w, d_head] - q = ggml_reshape_3d(ctx, q, d_head, h * w, n_head * n); // [N * n_head, h * w, d_head] - - struct ggml_tensor* k = ggml_mul_mat(ctx, transformer.attn2_k_w, context); // [N * max_position, in_channels] - k = ggml_reshape_4d(ctx, k, d_head, n_head, max_position, n); // [N, max_position, n_head, d_head] - k = ggml_cont(ctx, ggml_permute(ctx, k, 0, 2, 1, 3)); // [N, n_head, max_position, d_head] - k = ggml_reshape_3d(ctx, k, d_head, max_position, n_head * n); // [N * n_head, max_position, d_head] - - struct ggml_tensor* v = ggml_mul_mat(ctx, transformer.attn2_v_w, context); // [N * max_position, in_channels] - v = ggml_reshape_4d(ctx, v, d_head, n_head, max_position, n); // [N, max_position, n_head, d_head] - v = ggml_cont(ctx, ggml_permute(ctx, v, 1, 2, 0, 3)); // [N, n_head, d_head, max_position] - v = ggml_reshape_3d(ctx, v, max_position, d_head, n_head * n); // [N * n_head, d_head, max_position] - - struct ggml_tensor* kq = ggml_mul_mat(ctx, k, q); // [N * n_head, h * w, max_position] - // kq = ggml_diag_mask_inf_inplace(ctx, kq, 0); - kq = ggml_soft_max_inplace(ctx, kq); - - struct ggml_tensor* kqv = ggml_mul_mat(ctx, v, kq); // [N * n_head, h * w, d_head] - - kqv = ggml_reshape_4d(ctx, kqv, d_head, h * w, n_head, n); - kqv = ggml_cont(ctx, ggml_permute(ctx, kqv, 0, 2, 1, 3)); - - // x = ggml_cpy(ctx, kqv, ggml_new_tensor_2d(ctx, GGML_TYPE_F32, d_head * n_head, h * w * n)); // [N * h * w, in_channels] - x = ggml_reshape_2d(ctx, kqv, d_head * n_head, h * w * n); // [N * h * w, in_channels] - - x = ggml_add(ctx, ggml_repeat(ctx, transformer.attn2_out_b, x), ggml_mul_mat(ctx, 
transformer.attn2_out_w, x)); - - x = ggml_reshape_4d(ctx, x, c, w, h, n); - } - - x = ggml_add(ctx, x, r); - r = x; - - // layer norm 3 - { - x = ggml_reshape_2d(ctx, x, c, h * w * n); // [N * h * w, in_channels] - x = ggml_norm(ctx, x); - x = ggml_add(ctx, - ggml_mul(ctx, - ggml_repeat(ctx, transformer.norm3_w, x), x), - ggml_repeat(ctx, transformer.norm3_b, x)); - } - - // ff - { - // GEGLU - auto x_w = ggml_view_2d(ctx, - transformer.ff_0_proj_w, - transformer.ff_0_proj_w->ne[0], - transformer.ff_0_proj_w->ne[1] / 2, - transformer.ff_0_proj_w->nb[1], - 0); // [in_channels * 4, in_channels] - auto x_b = ggml_view_1d(ctx, - transformer.ff_0_proj_b, - transformer.ff_0_proj_b->ne[0] / 2, - 0); // [in_channels * 4, in_channels] - auto gate_w = ggml_view_2d(ctx, - transformer.ff_0_proj_w, - transformer.ff_0_proj_w->ne[0], - transformer.ff_0_proj_w->ne[1] / 2, - transformer.ff_0_proj_w->nb[1], - transformer.ff_0_proj_w->nb[1] * transformer.ff_0_proj_w->ne[1] / 2); // [in_channels * 4, ] - auto gate_b = ggml_view_1d(ctx, - transformer.ff_0_proj_b, - transformer.ff_0_proj_b->ne[0] / 2, - transformer.ff_0_proj_b->nb[0] * transformer.ff_0_proj_b->ne[0] / 2); // [in_channels * 4, ] - x = ggml_reshape_2d(ctx, x, c, w * h * n); - auto x_in = x; - x = ggml_mul_mat(ctx, x_w, x_in); // [N * h * w, in_channels * 4] - x = ggml_add(ctx, ggml_repeat(ctx, x_b, x), x); - auto gate = ggml_mul_mat(ctx, gate_w, x_in); // [N * h * w, in_channels * 4] - gate = ggml_add(ctx, ggml_repeat(ctx, gate_b, gate), gate); - - gate = ggml_gelu_inplace(ctx, gate); - - x = ggml_mul(ctx, x, gate); // [N * h * w, in_channels * 4] - // fc - x = ggml_mul_mat(ctx, transformer.ff_2_w, x); // [N * h * w, in_channels] - x = ggml_add(ctx, ggml_repeat(ctx, transformer.ff_2_b, x), x); - } - - x = ggml_reshape_4d(ctx, x, c, w, h, n); // [N, h, w, in_channels] - - // residual - x = ggml_add(ctx, x, r); - } - x = ggml_cont(ctx, ggml_permute(ctx, x, 2, 0, 1, 3)); // // [N, in_channels, h, w] - - // proj_out - x = 
ggml_conv_2d(ctx, proj_out_w, x, 1, 1, 0, 0, 1, 1); - x = ggml_add(ctx, - x, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, proj_out_b, 1, 1, proj_out_b->ne[0], 1), - x)); // [N, in_channels, h, w] - x = ggml_add(ctx, x, x_in); - return x; - } -}; - -struct DownSample { - // hparams - int channels; - int out_channels; - - // conv2d params - struct ggml_tensor* op_w; // [out_channels, channels, 3, 3] - struct ggml_tensor* op_b; // [out_channels,] - - bool vae_downsample = false; - - size_t compute_params_mem_size(ggml_type wtype) { - double mem_size = 0; - mem_size += out_channels * channels * 3 * 3 * ggml_type_sizef(GGML_TYPE_F16); // op_w - mem_size += out_channels * ggml_type_sizef(GGML_TYPE_F32); // op_b - mem_size += 2 * ggml_tensor_overhead(); // object overhead - return static_cast(mem_size); - } - - void init_params(struct ggml_context* ctx, ggml_type wtype) { - op_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 3, 3, channels, out_channels); - op_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, out_channels); - } - - void map_by_name(std::map& tensors, const std::string prefix) { - if (vae_downsample) { - tensors[prefix + "conv.weight"] = op_w; - tensors[prefix + "conv.bias"] = op_b; - } else { - tensors[prefix + "op.weight"] = op_w; - tensors[prefix + "op.bias"] = op_b; - } - } - - // TODO: making it parallel - static void asymmetric_pad(struct ggml_tensor* dst, - const struct ggml_tensor* a, - const struct ggml_tensor* b, - int ith, - int nth, - void* userdata) { - assert(sizeof(dst->nb[0]) == sizeof(float)); - assert(sizeof(a->nb[0]) == sizeof(float)); - assert(sizeof(b->nb[0]) == sizeof(float)); - float value = 0; - - for (int i = 0; i < dst->ne[3]; i++) { - for (int j = 0; j < dst->ne[2]; j++) { - for (int k = 0; k < dst->ne[1]; k++) { - for (int l = 0; l < dst->ne[0]; l++) { - if (k == dst->ne[1] - 1 || l == dst->ne[0] - 1) { - value = 0; - } else { - value = ggml_tensor_get_f32(b, l, k, j, i); - } - // printf("%d %d %d %d -> %f\n", i, j, k, l, value); - 
ggml_tensor_set_f32(dst, value, l, k, j, i); - } - } - } - } - } - - struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { - // x: [N, channels, h, w] - if (vae_downsample) { - bool dynamic = ggml_get_dynamic(ctx); - ggml_set_dynamic(ctx, false); - auto pad_x = ggml_new_tensor_4d(ctx, x->type, x->ne[0] + 1, x->ne[1] + 1, x->ne[2], x->ne[3]); - ggml_set_dynamic(ctx, dynamic); - - x = ggml_map_custom2_inplace(ctx, pad_x, x, asymmetric_pad, 1, NULL); - x = ggml_conv_2d(ctx, op_w, x, 2, 2, 0, 0, 1, 1); - } else { - x = ggml_conv_2d(ctx, op_w, x, 2, 2, 1, 1, 1, 1); - } - x = ggml_add(ctx, - x, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, op_b, 1, 1, op_b->ne[0], 1), - x)); // [N, out_channels, h/2, w/2] - return x; - } -}; - -struct UpSample { - // hparams - int channels; - int out_channels; - - // conv2d params - struct ggml_tensor* conv_w; // [out_channels, channels, 3, 3] - struct ggml_tensor* conv_b; // [out_channels,] - - size_t compute_params_mem_size(ggml_type wtype) { - double mem_size = 0; - mem_size += out_channels * channels * 3 * 3 * ggml_type_sizef(GGML_TYPE_F16); // op_w - mem_size += out_channels * ggml_type_sizef(GGML_TYPE_F32); // op_b - mem_size += 2 * ggml_tensor_overhead(); // object overhead - return static_cast(mem_size); - } - - void init_params(struct ggml_context* ctx, ggml_type wtype) { - conv_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 3, 3, channels, out_channels); - conv_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, out_channels); - } - - void map_by_name(std::map& tensors, const std::string prefix) { - tensors[prefix + "conv.weight"] = conv_w; - tensors[prefix + "conv.bias"] = conv_b; - } - - struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { - // x: [N, channels, h, w] - x = ggml_upscale(ctx, x); // [N, channels, h*2, w*2] - x = ggml_conv_2d(ctx, conv_w, x, 1, 1, 1, 1, 1, 1); - - x = ggml_add(ctx, - x, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, conv_b, 1, 1, conv_b->ne[0], 1), - x)); // [N, 
out_channels, h*2, w*2] - return x; - } -}; - -// ldm.modules.diffusionmodules.openaimodel.UNetModel -struct UNetModel { - // network hparams - int in_channels = 4; - int model_channels = 320; - int out_channels = 4; - int num_res_blocks = 2; - int attention_resolutions[3] = {4, 2, 1}; - int channel_mult[4] = {1, 2, 4, 4}; - int time_embed_dim = 1280; // model_channels*4 - int num_heads = 8; - int num_head_channels = -1; // channels // num_heads - - // network params - struct ggml_tensor* time_embed_0_w; // [time_embed_dim, model_channels] - struct ggml_tensor* time_embed_0_b; // [time_embed_dim, ] - // time_embed_1 is nn.SILU() - struct ggml_tensor* time_embed_2_w; // [time_embed_dim, time_embed_dim] - struct ggml_tensor* time_embed_2_b; // [time_embed_dim, ] - - struct ggml_tensor* input_block_0_w; // [model_channels, in_channels, 3, 3] - struct ggml_tensor* input_block_0_b; // [model_channels, ] - - // input_blocks - ResBlock input_res_blocks[4][2]; - SpatialTransformer input_transformers[3][2]; - DownSample input_down_samples[3]; - - // middle_block - ResBlock middle_block_0; - SpatialTransformer middle_block_1; - ResBlock middle_block_2; - - // output_blocks - ResBlock output_res_blocks[4][3]; - SpatialTransformer output_transformers[3][3]; - UpSample output_up_samples[3]; - - // out - // group norm 32 - struct ggml_tensor* out_0_w; // [model_channels, ] - struct ggml_tensor* out_0_b; // [model_channels, ] - // out 1 is nn.SILU() - struct ggml_tensor* out_2_w; // [out_channels, model_channels, 3, 3] - struct ggml_tensor* out_2_b; // [out_channels, ] - - UNetModel() { - // set up hparams of blocks - - // input_blocks - std::vector input_block_chans; - input_block_chans.push_back(model_channels); - int ch = model_channels; - int ds = 1; - - int len_mults = sizeof(channel_mult) / sizeof(int); - for (int i = 0; i < len_mults; i++) { - int mult = channel_mult[i]; - for (int j = 0; j < num_res_blocks; j++) { - input_res_blocks[i][j].channels = ch; - 
input_res_blocks[i][j].emb_channels = time_embed_dim; - input_res_blocks[i][j].out_channels = mult * model_channels; - - ch = mult * model_channels; - - if (ds == attention_resolutions[0] || ds == attention_resolutions[1] || ds == attention_resolutions[2]) { - input_transformers[i][j].in_channels = ch; - input_transformers[i][j].n_head = num_heads; - input_transformers[i][j].d_head = ch / num_heads; - } - input_block_chans.push_back(ch); - } - if (i != len_mults - 1) { - input_down_samples[i].channels = ch; - input_down_samples[i].out_channels = ch; - input_block_chans.push_back(ch); - - ds *= 2; - } - } - - // middle blocks - middle_block_0.channels = ch; - middle_block_0.emb_channels = time_embed_dim; - middle_block_0.out_channels = ch; - - middle_block_1.in_channels = ch; - middle_block_1.n_head = num_heads; - middle_block_1.d_head = ch / num_heads; - - middle_block_2.channels = ch; - middle_block_2.emb_channels = time_embed_dim; - middle_block_2.out_channels = ch; - - // output blocks - for (int i = len_mults - 1; i >= 0; i--) { - int mult = channel_mult[i]; - for (int j = 0; j < num_res_blocks + 1; j++) { - int ich = input_block_chans.back(); - input_block_chans.pop_back(); - - output_res_blocks[i][j].channels = ch + ich; - output_res_blocks[i][j].emb_channels = time_embed_dim; - output_res_blocks[i][j].out_channels = mult * model_channels; - - ch = mult * model_channels; - - if (ds == attention_resolutions[0] || ds == attention_resolutions[1] || ds == attention_resolutions[2]) { - output_transformers[i][j].in_channels = ch; - output_transformers[i][j].n_head = num_heads; - output_transformers[i][j].d_head = ch / num_heads; - } - - if (i > 0 && j == num_res_blocks) { - output_up_samples[i - 1].channels = ch; - output_up_samples[i - 1].out_channels = ch; - - ds /= 2; - } - } - } - } - - size_t compute_params_mem_size(ggml_type wtype) { - double mem_size = 0; - mem_size += time_embed_dim * model_channels * ggml_type_sizef(wtype); // time_embed_0_w - mem_size += 
time_embed_dim * ggml_type_sizef(GGML_TYPE_F32); // time_embed_0_b - mem_size += time_embed_dim * time_embed_dim * ggml_type_sizef(wtype); // time_embed_2_w - mem_size += time_embed_dim * ggml_type_sizef(GGML_TYPE_F32); // time_embed_2_b - - mem_size += model_channels * in_channels * 3 * 3 * ggml_type_sizef(GGML_TYPE_F16); // input_block_0_w - mem_size += model_channels * ggml_type_sizef(GGML_TYPE_F32); // input_block_0_b - - mem_size += 6 * ggml_tensor_overhead(); // object overhead - - // input_blocks - int ds = 1; - int len_mults = sizeof(channel_mult) / sizeof(int); - for (int i = 0; i < len_mults; i++) { - for (int j = 0; j < num_res_blocks; j++) { - mem_size += input_res_blocks[i][j].compute_params_mem_size(wtype); - if (ds == attention_resolutions[0] || ds == attention_resolutions[1] || ds == attention_resolutions[2]) { - mem_size += input_transformers[i][j].compute_params_mem_size(wtype); - } - } - if (i != len_mults - 1) { - ds *= 2; - mem_size += input_down_samples[i].compute_params_mem_size(wtype); - } - } - - // middle_block - mem_size += middle_block_0.compute_params_mem_size(wtype); - mem_size += middle_block_1.compute_params_mem_size(wtype); - mem_size += middle_block_2.compute_params_mem_size(wtype); - - // output_blocks - for (int i = len_mults - 1; i >= 0; i--) { - for (int j = 0; j < num_res_blocks + 1; j++) { - mem_size += output_res_blocks[i][j].compute_params_mem_size(wtype); - - if (ds == attention_resolutions[0] || ds == attention_resolutions[1] || ds == attention_resolutions[2]) { - mem_size += output_transformers[i][j].compute_params_mem_size(wtype); - } - - if (i > 0 && j == num_res_blocks) { - mem_size += output_up_samples[i - 1].compute_params_mem_size(wtype); - - ds /= 2; - } - } - } - - // out - mem_size += 2 * model_channels * ggml_type_sizef(GGML_TYPE_F32); // out_0_w/b - mem_size += out_channels * model_channels * 3 * 3 * ggml_type_sizef(GGML_TYPE_F16); // out_2_w - mem_size += out_channels * ggml_type_sizef(GGML_TYPE_F32); // 
out_2_b - - mem_size += 4 * ggml_tensor_overhead(); - - return static_cast(mem_size); - } - - void init_params(struct ggml_context* ctx, ggml_type wtype) { - time_embed_0_w = ggml_new_tensor_2d(ctx, wtype, model_channels, time_embed_dim); - time_embed_0_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, time_embed_dim); - - time_embed_2_w = ggml_new_tensor_2d(ctx, wtype, time_embed_dim, time_embed_dim); - time_embed_2_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, time_embed_dim); - - // input_blocks - input_block_0_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 3, 3, in_channels, model_channels); - input_block_0_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, model_channels); - int ds = 1; - int len_mults = sizeof(channel_mult) / sizeof(int); - for (int i = 0; i < len_mults; i++) { - for (int j = 0; j < num_res_blocks; j++) { - input_res_blocks[i][j].init_params(ctx, wtype); - if (ds == attention_resolutions[0] || ds == attention_resolutions[1] || ds == attention_resolutions[2]) { - input_transformers[i][j].init_params(ctx, wtype); - } - } - if (i != len_mults - 1) { - input_down_samples[i].init_params(ctx, wtype); - ds *= 2; - } - } - - // middle_blocks - middle_block_0.init_params(ctx, wtype); - middle_block_1.init_params(ctx, wtype); - middle_block_2.init_params(ctx, wtype); - - // output_blocks - for (int i = len_mults - 1; i >= 0; i--) { - for (int j = 0; j < num_res_blocks + 1; j++) { - output_res_blocks[i][j].init_params(ctx, wtype); - - if (ds == attention_resolutions[0] || ds == attention_resolutions[1] || ds == attention_resolutions[2]) { - output_transformers[i][j].init_params(ctx, wtype); - } - - if (i > 0 && j == num_res_blocks) { - output_up_samples[i - 1].init_params(ctx, wtype); - - ds /= 2; - } - } - } - - // out - out_0_w = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, model_channels); - out_0_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, model_channels); - - out_2_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 3, 3, model_channels, out_channels); - out_2_b = 
ggml_new_tensor_1d(ctx, GGML_TYPE_F32, out_channels); - } - - void map_by_name(std::map& tensors, const std::string prefix) { - tensors[prefix + "time_embed.0.weight"] = time_embed_0_w; - tensors[prefix + "time_embed.0.bias"] = time_embed_0_b; - - tensors[prefix + "time_embed.2.weight"] = time_embed_2_w; - tensors[prefix + "time_embed.2.bias"] = time_embed_2_b; - - // input_blocks - tensors[prefix + "input_blocks.0.0.weight"] = input_block_0_w; - tensors[prefix + "input_blocks.0.0.bias"] = input_block_0_b; - - int len_mults = sizeof(channel_mult) / sizeof(int); - int input_block_idx = 0; - int ds = 1; - for (int i = 0; i < len_mults; i++) { - for (int j = 0; j < num_res_blocks; j++) { - input_block_idx += 1; - - input_res_blocks[i][j].map_by_name(tensors, prefix + "input_blocks." + std::to_string(input_block_idx) + ".0."); - if (ds == attention_resolutions[0] || ds == attention_resolutions[1] || ds == attention_resolutions[2]) { - input_transformers[i][j].map_by_name(tensors, prefix + "input_blocks." + std::to_string(input_block_idx) + ".1."); - } - } - if (i != len_mults - 1) { - input_block_idx += 1; - input_down_samples[i].map_by_name(tensors, prefix + "input_blocks." + std::to_string(input_block_idx) + ".0."); - ds *= 2; - } - } - - // middle_blocks - middle_block_0.map_by_name(tensors, prefix + "middle_block.0."); - middle_block_1.map_by_name(tensors, prefix + "middle_block.1."); - middle_block_2.map_by_name(tensors, prefix + "middle_block.2."); - - // output_blocks - int output_block_idx = 0; - for (int i = len_mults - 1; i >= 0; i--) { - for (int j = 0; j < num_res_blocks + 1; j++) { - output_res_blocks[i][j].map_by_name(tensors, prefix + "output_blocks." + std::to_string(output_block_idx) + ".0."); - - int up_sample_idx = 1; - if (ds == attention_resolutions[0] || ds == attention_resolutions[1] || ds == attention_resolutions[2]) { - output_transformers[i][j].map_by_name(tensors, prefix + "output_blocks." 
+ std::to_string(output_block_idx) + ".1."); - up_sample_idx++; - } - - if (i > 0 && j == num_res_blocks) { - output_up_samples[i - 1].map_by_name(tensors, prefix + "output_blocks." + std::to_string(output_block_idx) + "." + std::to_string(up_sample_idx) + "."); - - ds /= 2; - } - output_block_idx += 1; - } - } - - // out - tensors[prefix + "out.0.weight"] = out_0_w; - tensors[prefix + "out.0.bias"] = out_0_b; - tensors[prefix + "out.2.weight"] = out_2_w; - tensors[prefix + "out.2.bias"] = out_2_b; - } - - struct ggml_tensor* forward(struct ggml_context* ctx, - struct ggml_tensor* x, - struct ggml_tensor* timesteps, - struct ggml_tensor* context, - struct ggml_tensor* t_emb = NULL) { - // x: [N, in_channels, h, w] - // timesteps: [N, ] - // t_emb: [N, model_channels] - // context: [N, max_position, hidden_size]([N, 77, 768]) - if (t_emb == NULL && timesteps != NULL) { - t_emb = new_timestep_embedding(ctx, timesteps, model_channels); // [N, model_channels] - } - - // time_embed - auto emb = ggml_mul_mat(ctx, time_embed_0_w, t_emb); - emb = ggml_add(ctx, ggml_repeat(ctx, time_embed_0_b, emb), emb); - emb = ggml_silu_inplace(ctx, emb); - emb = ggml_mul_mat(ctx, time_embed_2_w, emb); - emb = ggml_add(ctx, ggml_repeat(ctx, time_embed_2_b, emb), emb); // [N, time_embed_dim] - - // input_blocks - std::vector hs; - // input block 0 - auto h = ggml_conv_2d(ctx, input_block_0_w, x, 1, 1, 1, 1, 1, 1); // [N, model_channels, h, w] - h = ggml_add(ctx, - h, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, input_block_0_b, 1, 1, input_block_0_b->ne[0], 1), - h)); // [N, model_channels, h, w] - hs.push_back(h); - // input block 1-11 - int len_mults = sizeof(channel_mult) / sizeof(int); - int ds = 1; - for (int i = 0; i < len_mults; i++) { - int mult = channel_mult[i]; - for (int j = 0; j < num_res_blocks; j++) { - h = input_res_blocks[i][j].forward(ctx, h, emb); // [N, mult*model_channels, h, w] - if (ds == attention_resolutions[0] || ds == attention_resolutions[1] || ds == 
attention_resolutions[2]) { - h = input_transformers[i][j].forward(ctx, h, context); // [N, mult*model_channels, h, w] - } - hs.push_back(h); - } - if (i != len_mults - 1) { - ds *= 2; - h = input_down_samples[i].forward(ctx, h); // [N, mult*model_channels, h/(2^(i+1)), w/(2^(i+1))] - hs.push_back(h); - } - } - // [N, 4*model_channels, h/8, w/8] - - // middle_block - h = middle_block_0.forward(ctx, h, emb); // [N, 4*model_channels, h/8, w/8] - h = middle_block_1.forward(ctx, h, context); // [N, 4*model_channels, h/8, w/8] - h = middle_block_2.forward(ctx, h, emb); // [N, 4*model_channels, h/8, w/8] - - // output_blocks - for (int i = len_mults - 1; i >= 0; i--) { - for (int j = 0; j < num_res_blocks + 1; j++) { - auto h_skip = hs.back(); - hs.pop_back(); - - h = ggml_concat(ctx, h, h_skip); - h = output_res_blocks[i][j].forward(ctx, h, emb); - - if (ds == attention_resolutions[0] || ds == attention_resolutions[1] || ds == attention_resolutions[2]) { - h = output_transformers[i][j].forward(ctx, h, context); - } - - if (i > 0 && j == num_res_blocks) { - h = output_up_samples[i - 1].forward(ctx, h); - - ds /= 2; - } - } - } - - // out - // group norm 32 - h = ggml_group_norm(ctx, h); - h = ggml_add(ctx, - ggml_mul(ctx, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, out_0_w, 1, 1, out_0_w->ne[0], 1), - h), - h), - ggml_repeat(ctx, - ggml_reshape_4d(ctx, out_0_b, 1, 1, out_0_b->ne[0], 1), - h)); - // silu - h = ggml_silu_inplace(ctx, h); - // conv2d - h = ggml_conv_2d(ctx, out_2_w, h, 1, 1, 1, 1, 1, 1); - h = ggml_add(ctx, - h, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, out_2_b, 1, 1, out_2_b->ne[0], 1), - h)); // [N, out_channels, h, w] - - return h; - } -}; - -/*================================================== AutoEncoderKL ===================================================*/ - -struct ResnetBlock { - // network hparams - int in_channels; - int out_channels; - - // network params - struct ggml_tensor* norm1_w; // [in_channels, ] - struct ggml_tensor* norm1_b; // 
[in_channels, ] - - struct ggml_tensor* conv1_w; // [out_channels, in_channels, 3, 3] - struct ggml_tensor* conv1_b; // [out_channels, ] - - struct ggml_tensor* norm2_w; // [out_channels, ] - struct ggml_tensor* norm2_b; // [out_channels, ] - - struct ggml_tensor* conv2_w; // [out_channels, out_channels, 3, 3] - struct ggml_tensor* conv2_b; // [out_channels, ] - - // nin_shortcut, only if out_channels != in_channels - struct ggml_tensor* nin_shortcut_w; // [out_channels, in_channels, 1, 1] - struct ggml_tensor* nin_shortcut_b; // [out_channels, ] - - size_t compute_params_mem_size(ggml_type wtype) { - double mem_size = 0; - mem_size += 2 * in_channels * ggml_type_sizef(GGML_TYPE_F32); // norm1_w/b - mem_size += out_channels * in_channels * 3 * 3 * ggml_type_sizef(GGML_TYPE_F16); // conv1_w - mem_size += 4 * out_channels * ggml_type_sizef(GGML_TYPE_F32); // conv1_b/norm2_w/norm2_b/conv2_b - mem_size += out_channels * out_channels * 3 * 3 * ggml_type_sizef(GGML_TYPE_F16); // conv2_w - - mem_size += 8 * ggml_tensor_overhead(); // object overhead - - if (out_channels != in_channels) { - mem_size += out_channels * in_channels * 1 * 1 * ggml_type_sizef(GGML_TYPE_F16); // nin_shortcut_w - mem_size += out_channels * ggml_type_sizef(GGML_TYPE_F32); // nin_shortcut_b - - mem_size += 2 * ggml_tensor_overhead(); // object overhead - } - return static_cast(mem_size); - } - - void init_params(struct ggml_context* ctx, ggml_type wtype) { - norm1_w = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - norm1_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - conv1_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 3, 3, in_channels, out_channels); - conv1_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, out_channels); - - norm2_w = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, out_channels); - norm2_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, out_channels); - conv2_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 3, 3, out_channels, out_channels); - conv2_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 
out_channels); - - if (out_channels != in_channels) { - nin_shortcut_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 1, 1, in_channels, out_channels); - nin_shortcut_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, out_channels); - } - } - - void map_by_name(std::map& tensors, const std::string prefix) { - tensors[prefix + "norm1.weight"] = norm1_w; - tensors[prefix + "norm1.bias"] = norm1_b; - tensors[prefix + "conv1.weight"] = conv1_w; - tensors[prefix + "conv1.bias"] = conv1_b; - - tensors[prefix + "norm2.weight"] = norm2_w; - tensors[prefix + "norm2.bias"] = norm2_b; - tensors[prefix + "conv2.weight"] = conv2_w; - tensors[prefix + "conv2.bias"] = conv2_b; - - if (out_channels != in_channels) { - tensors[prefix + "nin_shortcut.weight"] = nin_shortcut_w; - tensors[prefix + "nin_shortcut.bias"] = nin_shortcut_b; - } - } - - struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* z) { - // z: [N, in_channels, h, w] - - // group norm 32 - auto h = ggml_group_norm(ctx, z); - h = ggml_mul(ctx, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, norm1_w, 1, 1, norm1_w->ne[0], 1), - h), - h); - h = ggml_add(ctx, - h, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, norm1_b, 1, 1, norm1_b->ne[0], 1), - h)); - // silu - h = ggml_silu_inplace(ctx, h); - // conv2d - h = ggml_conv_2d(ctx, conv1_w, h, 1, 1, 1, 1, 1, 1); - h = ggml_add(ctx, - h, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, conv1_b, 1, 1, conv1_b->ne[0], 1), - h)); // [N, out_channels, h, w] - - // group norm 32 - h = ggml_group_norm(ctx, h); - h = ggml_add(ctx, - ggml_mul(ctx, ggml_repeat(ctx, ggml_reshape_4d(ctx, norm2_w, 1, 1, norm2_w->ne[0], 1), h), h), - ggml_repeat(ctx, ggml_reshape_4d(ctx, norm2_b, 1, 1, norm2_b->ne[0], 1), h)); - // silu - h = ggml_silu_inplace(ctx, h); - // dropout, skip for inference - // conv2d - h = ggml_conv_2d(ctx, conv2_w, h, 1, 1, 1, 1, 1, 1); - h = ggml_add(ctx, - h, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, conv2_b, 1, 1, conv2_b->ne[0], 1), - h)); // [N, out_channels, h, w - - // skip 
connection - if (out_channels != in_channels) { - z = ggml_conv_2d(ctx, nin_shortcut_w, z, 1, 1, 0, 0, 1, 1); - z = ggml_add(ctx, - z, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, nin_shortcut_b, 1, 1, nin_shortcut_b->ne[0], 1), - z)); // [N, out_channels, h, w] - } - h = ggml_add(ctx, h, z); - return h; // [N, out_channels, h, w] - } -}; - -struct AttnBlock { - int in_channels; // mult * model_channels - - // group norm - struct ggml_tensor* norm_w; // [in_channels,] - struct ggml_tensor* norm_b; // [in_channels,] - - // q/k/v - struct ggml_tensor* q_w; // [in_channels, in_channels, 1, 1] - struct ggml_tensor* q_b; // [in_channels,] - struct ggml_tensor* k_w; // [in_channels, in_channels, 1, 1] - struct ggml_tensor* k_b; // [in_channels,] - struct ggml_tensor* v_w; // [in_channels, in_channels, 1, 1] - struct ggml_tensor* v_b; // [in_channels,] - - // proj_out - struct ggml_tensor* proj_out_w; // [in_channels, in_channels, 1, 1] - struct ggml_tensor* proj_out_b; // [in_channels,] - - size_t compute_params_mem_size(ggml_type wtype) { - double mem_size = 0; - mem_size += 6 * in_channels * ggml_type_sizef(GGML_TYPE_F32); // norm_w/norm_b/q_b/k_v/v_b/proj_out_b - mem_size += 4 * in_channels * in_channels * 1 * 1 * ggml_type_sizef(GGML_TYPE_F16); // q_w/k_w/v_w/proj_out_w - mem_size += 10 * ggml_tensor_overhead(); // object overhead - return static_cast(mem_size); - } - - void init_params(struct ggml_context* ctx, ggml_type wtype) { - norm_w = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - norm_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - q_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 1, 1, in_channels, in_channels); - q_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - k_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 1, 1, in_channels, in_channels); - k_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - v_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 1, 1, in_channels, in_channels); - v_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 
in_channels); - - proj_out_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 1, 1, in_channels, in_channels); - proj_out_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, in_channels); - } - - void map_by_name(std::map& tensors, const std::string prefix) { - tensors[prefix + "norm.weight"] = norm_w; - tensors[prefix + "norm.bias"] = norm_b; - tensors[prefix + "q.weight"] = q_w; - tensors[prefix + "q.bias"] = q_b; - tensors[prefix + "k.weight"] = k_w; - tensors[prefix + "k.bias"] = k_b; - tensors[prefix + "v.weight"] = v_w; - tensors[prefix + "v.bias"] = v_b; - tensors[prefix + "proj_out.weight"] = proj_out_w; - tensors[prefix + "proj_out.bias"] = proj_out_b; - } - - struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { - // x: [N, in_channels, h, w] - - // group norm 32 - auto h_ = ggml_group_norm(ctx, x); - h_ = ggml_add(ctx, - ggml_mul(ctx, ggml_repeat(ctx, ggml_reshape_4d(ctx, norm_w, 1, 1, norm_w->ne[0], 1), h_), h_), - ggml_repeat(ctx, ggml_reshape_4d(ctx, norm_b, 1, 1, norm_b->ne[0], 1), h_)); - - const int64_t n = h_->ne[3]; - const int64_t c = h_->ne[2]; - const int64_t h = h_->ne[1]; - const int64_t w = h_->ne[0]; - // q - auto q = ggml_conv_2d(ctx, q_w, h_, 1, 1, 0, 0, 1, 1); - q = ggml_add(ctx, - q, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, q_b, 1, 1, q_b->ne[0], 1), - q)); // [N, in_channels, h, w] - - // k - auto k = ggml_conv_2d(ctx, k_w, h_, 1, 1, 0, 0, 1, 1); - k = ggml_add(ctx, - k, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, k_b, 1, 1, k_b->ne[0], 1), - k)); // [N, in_channels, h, w] - - // v - auto v = ggml_conv_2d(ctx, v_w, h_, 1, 1, 0, 0, 1, 1); - v = ggml_add(ctx, - v, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, v_b, 1, 1, v_b->ne[0], 1), - v)); // [N, in_channels, h, w] - - q = ggml_cont(ctx, ggml_permute(ctx, q, 1, 2, 0, 3)); // [N, h, w, in_channels] - q = ggml_reshape_3d(ctx, q, c, h * w, n); // [N, h * w, in_channels] - - k = ggml_cont(ctx, ggml_permute(ctx, k, 1, 2, 0, 3)); // [N, h, w, in_channels] - k = ggml_reshape_3d(ctx, k, 
c, h * w, n); // [N, h * w, in_channels] - - auto w_ = ggml_mul_mat(ctx, k, q); // [N, h * w, h * w] - w_ = ggml_scale_inplace(ctx, w_, ggml_new_f32(ctx, 1.0f / sqrt((float)c))); - w_ = ggml_soft_max_inplace(ctx, w_); - - v = ggml_reshape_3d(ctx, v, h * w, c, n); // [N, in_channels, h * w] - h_ = ggml_mul_mat(ctx, v, w_); // [N, h * w, in_channels] - h_ = ggml_cont(ctx, ggml_permute(ctx, h_, 1, 0, 2, 3)); // [N, in_channels, h * w] - h_ = ggml_reshape_4d(ctx, h_, w, h, c, n); // [N, in_channels, h, w] - - // proj_out - h_ = ggml_conv_2d(ctx, proj_out_w, h_, 1, 1, 0, 0, 1, 1); - h_ = ggml_add(ctx, - h_, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, proj_out_b, 1, 1, proj_out_b->ne[0], 1), - h_)); // [N, in_channels, h, w] - h_ = ggml_add(ctx, h_, x); - return h_; - } -}; - -// ldm.modules.diffusionmodules.model.Encoder -struct Encoder { - int embed_dim = 4; - int ch = 128; - int z_channels = 4; - int in_channels = 3; - int num_res_blocks = 2; - int ch_mult[4] = {1, 2, 4, 4}; - - struct ggml_tensor* conv_in_w; // [ch, in_channels, 3, 3] - struct ggml_tensor* conv_in_b; // [ch, ] - - ResnetBlock down_blocks[4][2]; - DownSample down_samples[3]; - - struct - { - ResnetBlock block_1; - AttnBlock attn_1; - ResnetBlock block_2; - } mid; - - // block_in = ch * ch_mult[len_mults - 1] - struct ggml_tensor* norm_out_w; // [block_in, ] - struct ggml_tensor* norm_out_b; // [block_in, ] - - struct ggml_tensor* conv_out_w; // [embed_dim*2, block_in, 3, 3] - struct ggml_tensor* conv_out_b; // [embed_dim*2, ] - - Encoder() { - int len_mults = sizeof(ch_mult) / sizeof(int); - - int block_in = 1; - for (int i = 0; i < len_mults; i++) { - if (i == 0) { - block_in = ch; - } else { - block_in = ch * ch_mult[i - 1]; - } - int block_out = ch * ch_mult[i]; - for (int j = 0; j < num_res_blocks; j++) { - down_blocks[i][j].in_channels = block_in; - down_blocks[i][j].out_channels = block_out; - block_in = block_out; - } - if (i != len_mults - 1) { - down_samples[i].channels = block_in; - 
down_samples[i].out_channels = block_in; - down_samples[i].vae_downsample = true; - } - } - - mid.block_1.in_channels = block_in; - mid.block_1.out_channels = block_in; - mid.attn_1.in_channels = block_in; - mid.block_2.in_channels = block_in; - mid.block_2.out_channels = block_in; - } - - size_t compute_params_mem_size(ggml_type wtype) { - double mem_size = 0; - int len_mults = sizeof(ch_mult) / sizeof(int); - int block_in = ch * ch_mult[len_mults - 1]; - - mem_size += ch * in_channels * 3 * 3 * ggml_type_sizef(GGML_TYPE_F16); // conv_in_w - mem_size += ch * ggml_type_sizef(GGML_TYPE_F32); // conv_in_b - - mem_size += 2 * block_in * ggml_type_sizef(GGML_TYPE_F32); // norm_out_w/b - - mem_size += z_channels * 2 * block_in * 3 * 3 * ggml_type_sizef(GGML_TYPE_F16); // conv_out_w - mem_size += z_channels * 2 * ggml_type_sizef(GGML_TYPE_F32); // conv_out_b - - mem_size += 6 * ggml_tensor_overhead(); // object overhead - - mem_size += mid.block_1.compute_params_mem_size(wtype); - mem_size += mid.attn_1.compute_params_mem_size(wtype); - mem_size += mid.block_2.compute_params_mem_size(wtype); - - for (int i = len_mults - 1; i >= 0; i--) { - for (int j = 0; j < num_res_blocks + 1; j++) { - mem_size += down_blocks[i][j].compute_params_mem_size(wtype); - } - if (i != 0) { - mem_size += down_samples[i - 1].compute_params_mem_size(wtype); - } - } - - return static_cast(mem_size); - } - - void init_params(struct ggml_context* ctx, ggml_type wtype) { - int len_mults = sizeof(ch_mult) / sizeof(int); - int block_in = ch * ch_mult[len_mults - 1]; - - conv_in_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 3, 3, in_channels, ch); - conv_in_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, ch); - - norm_out_w = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, block_in); - norm_out_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, block_in); - - conv_out_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 3, 3, block_in, z_channels * 2); - conv_out_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, z_channels * 2); - - 
mid.block_1.init_params(ctx, wtype); - mid.attn_1.init_params(ctx, wtype); - mid.block_2.init_params(ctx, wtype); - - for (int i = 0; i < len_mults; i++) { - for (int j = 0; j < num_res_blocks; j++) { - down_blocks[i][j].init_params(ctx, wtype); - } - if (i != len_mults - 1) { - down_samples[i].init_params(ctx, wtype); - } - } - } - - void map_by_name(std::map& tensors, const std::string prefix) { - tensors[prefix + "norm_out.weight"] = norm_out_w; - tensors[prefix + "norm_out.bias"] = norm_out_b; - tensors[prefix + "conv_in.weight"] = conv_in_w; - tensors[prefix + "conv_in.bias"] = conv_in_b; - tensors[prefix + "conv_out.weight"] = conv_out_w; - tensors[prefix + "conv_out.bias"] = conv_out_b; - - mid.block_1.map_by_name(tensors, prefix + "mid.block_1."); - mid.attn_1.map_by_name(tensors, prefix + "mid.attn_1."); - mid.block_2.map_by_name(tensors, prefix + "mid.block_2."); - - int len_mults = sizeof(ch_mult) / sizeof(int); - for (int i = 0; i < len_mults; i++) { - for (int j = 0; j < num_res_blocks; j++) { - down_blocks[i][j].map_by_name(tensors, prefix + "down." + std::to_string(i) + ".block." + std::to_string(j) + "."); - } - if (i != len_mults - 1) { - down_samples[i].map_by_name(tensors, prefix + "down." 
+ std::to_string(i) + ".downsample."); - } - } - } - - struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { - // x: [N, in_channels, h, w] - - // conv_in - auto h = ggml_conv_2d(ctx, conv_in_w, x, 1, 1, 1, 1, 1, 1); - h = ggml_add(ctx, - h, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, conv_in_b, 1, 1, conv_in_b->ne[0], 1), - h)); // [N, ch, h, w] - int len_mults = sizeof(ch_mult) / sizeof(int); - for (int i = 0; i < len_mults; i++) { - for (int j = 0; j < num_res_blocks; j++) { - h = down_blocks[i][j].forward(ctx, h); - } - if (i != len_mults - 1) { - h = down_samples[i].forward(ctx, h); - } - } +#include "ggml_extend.hpp" - h = mid.block_1.forward(ctx, h); - h = mid.attn_1.forward(ctx, h); - h = mid.block_2.forward(ctx, h); // [N, block_in, h, w] - - // group norm 32 - h = ggml_group_norm(ctx, h); - h = ggml_add(ctx, - ggml_mul(ctx, ggml_repeat(ctx, ggml_reshape_4d(ctx, norm_out_w, 1, 1, norm_out_w->ne[0], 1), h), h), - ggml_repeat(ctx, ggml_reshape_4d(ctx, norm_out_b, 1, 1, norm_out_b->ne[0], 1), h)); - - // silu - // silu - h = ggml_silu_inplace(ctx, h); - - // conv_out - h = ggml_conv_2d(ctx, conv_out_w, h, 1, 1, 1, 1, 1, 1); - h = ggml_add(ctx, - h, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, conv_out_b, 1, 1, conv_out_b->ne[0], 1), - h)); // [N, z_channels*2, h, w] - - return h; - } -}; - -// ldm.modules.diffusionmodules.model.Decoder -struct Decoder { - int embed_dim = 4; - int ch = 128; - int z_channels = 4; - int out_ch = 3; - int num_res_blocks = 2; - int ch_mult[4] = {1, 2, 4, 4}; - - // block_in = ch * ch_mult[-1], 512 - struct ggml_tensor* conv_in_w; // [block_in, z_channels, 3, 3] - struct ggml_tensor* conv_in_b; // [block_in, ] - - struct - { - ResnetBlock block_1; - AttnBlock attn_1; - ResnetBlock block_2; - } mid; - - ResnetBlock up_blocks[4][3]; - UpSample up_samples[3]; - - struct ggml_tensor* norm_out_w; // [ch * ch_mult[0], ] - struct ggml_tensor* norm_out_b; // [ch * ch_mult[0], ] - - struct ggml_tensor* conv_out_w; // 
[out_ch, ch * ch_mult[0], 3, 3] - struct ggml_tensor* conv_out_b; // [out_ch, ] - - Decoder() { - int len_mults = sizeof(ch_mult) / sizeof(int); - int block_in = ch * ch_mult[len_mults - 1]; - - mid.block_1.in_channels = block_in; - mid.block_1.out_channels = block_in; - mid.attn_1.in_channels = block_in; - mid.block_2.in_channels = block_in; - mid.block_2.out_channels = block_in; - - for (int i = len_mults - 1; i >= 0; i--) { - int mult = ch_mult[i]; - int block_out = ch * mult; - for (int j = 0; j < num_res_blocks + 1; j++) { - up_blocks[i][j].in_channels = block_in; - up_blocks[i][j].out_channels = block_out; - block_in = block_out; - } - if (i != 0) { - up_samples[i - 1].channels = block_in; - up_samples[i - 1].out_channels = block_in; - } - } - } - - size_t compute_params_mem_size(ggml_type wtype) { - double mem_size = 0; - int len_mults = sizeof(ch_mult) / sizeof(int); - int block_in = ch * ch_mult[len_mults - 1]; - - mem_size += block_in * z_channels * 3 * 3 * ggml_type_sizef(GGML_TYPE_F16); // conv_in_w - mem_size += block_in * ggml_type_sizef(GGML_TYPE_F32); // conv_in_b - - mem_size += 2 * (ch * ch_mult[0]) * ggml_type_sizef(GGML_TYPE_F32); // norm_out_w/b - - mem_size += (ch * ch_mult[0]) * out_ch * 3 * 3 * ggml_type_sizef(GGML_TYPE_F16); // conv_out_w - mem_size += out_ch * ggml_type_sizef(GGML_TYPE_F32); // conv_out_b - - mem_size += 8 * ggml_tensor_overhead(); // object overhead - - mem_size += mid.block_1.compute_params_mem_size(wtype); - mem_size += mid.attn_1.compute_params_mem_size(wtype); - mem_size += mid.block_2.compute_params_mem_size(wtype); - - for (int i = len_mults - 1; i >= 0; i--) { - for (int j = 0; j < num_res_blocks + 1; j++) { - mem_size += up_blocks[i][j].compute_params_mem_size(wtype); - } - if (i != 0) { - mem_size += up_samples[i - 1].compute_params_mem_size(wtype); - } - } - - return static_cast(mem_size); - } - - void init_params(struct ggml_context* ctx, ggml_type wtype) { - int len_mults = sizeof(ch_mult) / sizeof(int); - int 
block_in = ch * ch_mult[len_mults - 1]; - - norm_out_w = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, ch * ch_mult[0]); - norm_out_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, ch * ch_mult[0]); - - conv_in_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 3, 3, z_channels, block_in); - conv_in_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, block_in); - - conv_out_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 3, 3, ch * ch_mult[0], out_ch); - conv_out_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, out_ch); - - mid.block_1.init_params(ctx, wtype); - mid.attn_1.init_params(ctx, wtype); - mid.block_2.init_params(ctx, wtype); - - for (int i = len_mults - 1; i >= 0; i--) { - for (int j = 0; j < num_res_blocks + 1; j++) { - up_blocks[i][j].init_params(ctx, wtype); - } - if (i != 0) { - up_samples[i - 1].init_params(ctx, wtype); - } - } - } - - void map_by_name(std::map& tensors, const std::string prefix) { - tensors[prefix + "norm_out.weight"] = norm_out_w; - tensors[prefix + "norm_out.bias"] = norm_out_b; - tensors[prefix + "conv_in.weight"] = conv_in_w; - tensors[prefix + "conv_in.bias"] = conv_in_b; - tensors[prefix + "conv_out.weight"] = conv_out_w; - tensors[prefix + "conv_out.bias"] = conv_out_b; - - mid.block_1.map_by_name(tensors, prefix + "mid.block_1."); - mid.attn_1.map_by_name(tensors, prefix + "mid.attn_1."); - mid.block_2.map_by_name(tensors, prefix + "mid.block_2."); - - int len_mults = sizeof(ch_mult) / sizeof(int); - for (int i = len_mults - 1; i >= 0; i--) { - for (int j = 0; j < num_res_blocks + 1; j++) { - up_blocks[i][j].map_by_name(tensors, prefix + "up." + std::to_string(i) + ".block." + std::to_string(j) + "."); - } - if (i != 0) { - up_samples[i - 1].map_by_name(tensors, prefix + "up." 
+ std::to_string(i) + ".upsample."); - } - } - } - - struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* z) { - // z: [N, z_channels, h, w] - - // conv_in - auto h = ggml_conv_2d(ctx, conv_in_w, z, 1, 1, 1, 1, 1, 1); - h = ggml_add(ctx, - h, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, conv_in_b, 1, 1, conv_in_b->ne[0], 1), - h)); // [N, block_in, h, w] - - h = mid.block_1.forward(ctx, h); - h = mid.attn_1.forward(ctx, h); - h = mid.block_2.forward(ctx, h); // [N, block_in, h, w] - - int len_mults = sizeof(ch_mult) / sizeof(int); - for (int i = len_mults - 1; i >= 0; i--) { - for (int j = 0; j < num_res_blocks + 1; j++) { - h = up_blocks[i][j].forward(ctx, h); - } - if (i != 0) { - h = up_samples[i - 1].forward(ctx, h); - } - } - - // group norm 32 - h = ggml_group_norm(ctx, h); - h = ggml_add(ctx, - ggml_mul(ctx, ggml_repeat(ctx, ggml_reshape_4d(ctx, norm_out_w, 1, 1, norm_out_w->ne[0], 1), h), h), - ggml_repeat(ctx, ggml_reshape_4d(ctx, norm_out_b, 1, 1, norm_out_b->ne[0], 1), h)); - - // silu - // silu - h = ggml_silu_inplace(ctx, h); - - // conv_out - h = ggml_conv_2d(ctx, conv_out_w, h, 1, 1, 1, 1, 1, 1); - h = ggml_add(ctx, - h, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, conv_out_b, 1, 1, conv_out_b->ne[0], 1), - h)); // [N, out_ch, h, w] - - return h; - } -}; - -// ldm.models.autoencoder.AutoencoderKL -struct AutoEncoderKL { - bool decode_only = true; - int embed_dim = 4; - struct - { - int z_channels = 4; - int resolution = 256; - int in_channels = 3; - int out_ch = 3; - int ch = 128; - int ch_mult[4] = {1, 2, 4, 4}; - int num_res_blocks = 2; - } dd_config; - - struct ggml_tensor* quant_conv_w; // [2*embed_dim, 2*z_channels, 1, 1] - struct ggml_tensor* quant_conv_b; // [2*embed_dim, ] - - struct ggml_tensor* post_quant_conv_w; // [z_channels, embed_dim, 1, 1] - struct ggml_tensor* post_quant_conv_b; // [z_channels, ] - - Encoder encoder; - Decoder decoder; - - AutoEncoderKL(bool decode_only = false) - : decode_only(decode_only) { - 
assert(sizeof(dd_config.ch_mult) == sizeof(encoder.ch_mult)); - assert(sizeof(dd_config.ch_mult) == sizeof(decoder.ch_mult)); - - encoder.embed_dim = embed_dim; - decoder.embed_dim = embed_dim; - encoder.ch = dd_config.ch; - decoder.ch = dd_config.ch; - encoder.z_channels = dd_config.z_channels; - decoder.z_channels = dd_config.z_channels; - encoder.in_channels = dd_config.in_channels; - decoder.out_ch = dd_config.out_ch; - encoder.num_res_blocks = dd_config.num_res_blocks; - - int len_mults = sizeof(dd_config.ch_mult) / sizeof(int); - for (int i = 0; i < len_mults; i++) { - encoder.ch_mult[i] = dd_config.ch_mult[i]; - decoder.ch_mult[i] = dd_config.ch_mult[i]; - } - } - - size_t compute_params_mem_size(ggml_type wtype) { - double mem_size = 0; - - if (!decode_only) { - mem_size += 2 * embed_dim * 2 * dd_config.z_channels * 1 * 1 * ggml_type_sizef(GGML_TYPE_F16); // quant_conv_w - mem_size += 2 * embed_dim * ggml_type_sizef(GGML_TYPE_F32); // quant_conv_b - mem_size += encoder.compute_params_mem_size(wtype); - } - - mem_size += dd_config.z_channels * embed_dim * 1 * 1 * ggml_type_sizef(GGML_TYPE_F16); // post_quant_conv_w - mem_size += dd_config.z_channels * ggml_type_sizef(GGML_TYPE_F32); // post_quant_conv_b - - mem_size += decoder.compute_params_mem_size(wtype); - return static_cast(mem_size); - } - - void init_params(struct ggml_context* ctx, ggml_type wtype) { - if (!decode_only) { - quant_conv_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 1, 1, 2 * dd_config.z_channels, 2 * embed_dim); - quant_conv_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 2 * embed_dim); - encoder.init_params(ctx, wtype); - } - - post_quant_conv_w = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, 1, 1, embed_dim, dd_config.z_channels); - post_quant_conv_b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, dd_config.z_channels); - decoder.init_params(ctx, wtype); - } - - void map_by_name(std::map& tensors, const std::string prefix) { - if (!decode_only) { - tensors[prefix + "quant_conv.weight"] = quant_conv_w; - 
tensors[prefix + "quant_conv.bias"] = quant_conv_b; - encoder.map_by_name(tensors, prefix + "encoder."); - } - - tensors[prefix + "post_quant_conv.weight"] = post_quant_conv_w; - tensors[prefix + "post_quant_conv.bias"] = post_quant_conv_b; - decoder.map_by_name(tensors, prefix + "decoder."); - } - - struct ggml_tensor* decode(struct ggml_context* ctx, struct ggml_tensor* z) { - // z: [N, z_channels, h, w] - - // post_quant_conv - auto h = ggml_conv_2d(ctx, post_quant_conv_w, z, 1, 1, 0, 0, 1, 1); - h = ggml_add(ctx, - h, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, post_quant_conv_b, 1, 1, post_quant_conv_b->ne[0], 1), - h)); // [N, z_channels, h, w] - h = decoder.forward(ctx, h); - return h; - } - - struct ggml_tensor* encode(struct ggml_context* ctx, struct ggml_tensor* x) { - // x: [N, in_channels, h, w] - auto h = encoder.forward(ctx, x); // [N, 2*z_channels, h/8, w/8] - // quant_conv - h = ggml_conv_2d(ctx, quant_conv_w, h, 1, 1, 0, 0, 1, 1); - h = ggml_add(ctx, - h, - ggml_repeat(ctx, - ggml_reshape_4d(ctx, quant_conv_b, 1, 1, quant_conv_b->ne[0], 1), - h)); // [N, 2*embed_dim, h/8, w/8] - return h; - } -}; - -/*================================================= CompVisDenoiser ==================================================*/ - -// Ref: https://github.com/crowsonkb/k-diffusion/blob/master/k_diffusion/external.py -struct CompVisDenoiser { - float alphas_cumprod[TIMESTEPS]; - float sigmas[TIMESTEPS]; - float log_sigmas[TIMESTEPS]; - - std::vector get_sigmas(int n) { - std::vector result; +#include "model.h" +#include "rng.hpp" +#include "rng_philox.hpp" +#include "stable-diffusion.h" +#include "util.h" + +#include "conditioner.hpp" +#include "control.hpp" +#include "denoiser.hpp" +#include "diffusion_model.hpp" +#include "esrgan.hpp" +#include "lora.hpp" +#include "pmid.hpp" +#include "tae.hpp" +#include "vae.hpp" + +#define STB_IMAGE_IMPLEMENTATION +#define STB_IMAGE_STATIC +#include "stb_image.h" + +// #define STB_IMAGE_WRITE_IMPLEMENTATION +// #define 
STB_IMAGE_WRITE_STATIC +// #include "stb_image_write.h" + +const char* model_version_to_str[] = { + "SD 1.x", + "SD 1.x Inpaint", + "SD 2.x", + "SD 2.x Inpaint", + "SDXL", + "SDXL Inpaint", + "SVD", + "SD3.x", + "Flux", + "Flux Fill"}; + +const char* sampling_methods_str[] = { + "Euler A", + "Euler", + "Heun", + "DPM2", + "DPM++ (2s)", + "DPM++ (2M)", + "modified DPM++ (2M)", + "iPNDM", + "iPNDM_v", + "LCM", + "DDIM \"trailing\"", + "TCD"}; - int t_max = TIMESTEPS - 1; - float step = static_cast(t_max) / static_cast(n - 1); - for (int i = 0; i < n; ++i) { - float t = t_max - step * i; - result.push_back(t_to_sigma(t)); - } - result.push_back(0); - return result; - } +/*================================================== Helper Functions ================================================*/ - std::pair get_scalings(float sigma) { - float c_out = -sigma; - float c_in = 1.0f / std::sqrt(sigma * sigma + 1); - return std::pair(c_in, c_out); +void calculate_alphas_cumprod(float* alphas_cumprod, + float linear_start = 0.00085f, + float linear_end = 0.0120, + int timesteps = TIMESTEPS) { + float ls_sqrt = sqrtf(linear_start); + float le_sqrt = sqrtf(linear_end); + float amount = le_sqrt - ls_sqrt; + float product = 1.0f; + for (int i = 0; i < timesteps; i++) { + float beta = ls_sqrt + amount * ((float)i / (timesteps - 1)); + product *= 1.0f - powf(beta, 2.0f); + alphas_cumprod[i] = product; } +} - float sigma_to_t(float sigma) { - float log_sigma = std::log(sigma); - std::vector dists; - dists.reserve(TIMESTEPS); - for (float log_sigma_val : log_sigmas) { - dists.push_back(log_sigma - log_sigma_val); - } - - int low_idx = 0; - for (size_t i = 0; i < TIMESTEPS; i++) { - if (dists[i] >= 0) { - low_idx++; - } - } - low_idx = std::min(std::max(low_idx - 1, 0), TIMESTEPS - 2); - int high_idx = low_idx + 1; +/*=============================================== StableDiffusionGGML ================================================*/ - float low = log_sigmas[low_idx]; - float high = 
log_sigmas[high_idx]; - float w = (low - log_sigma) / (low - high); - w = std::max(0.f, std::min(1.f, w)); - float t = (1.0f - w) * low_idx + w * high_idx; +class StableDiffusionGGML { +public: + ggml_backend_t backend = NULL; // general backend + ggml_backend_t clip_backend = NULL; + ggml_backend_t control_net_backend = NULL; + ggml_backend_t vae_backend = NULL; + ggml_type model_wtype = GGML_TYPE_COUNT; + ggml_type conditioner_wtype = GGML_TYPE_COUNT; + ggml_type diffusion_model_wtype = GGML_TYPE_COUNT; + ggml_type vae_wtype = GGML_TYPE_COUNT; + + SDVersion version; + bool vae_decode_only = false; + bool free_params_immediately = false; - return t; - } + std::shared_ptr rng = std::make_shared(); + int n_threads = -1; + float scale_factor = 0.18215f; - float t_to_sigma(float t) { - int low_idx = static_cast(std::floor(t)); - int high_idx = static_cast(std::ceil(t)); - float w = t - static_cast(low_idx); - float log_sigma = (1.0f - w) * log_sigmas[low_idx] + w * log_sigmas[high_idx]; - return std::exp(log_sigma); - } -}; + std::shared_ptr cond_stage_model; + std::shared_ptr clip_vision; // for svd + std::shared_ptr diffusion_model; + std::shared_ptr first_stage_model; + std::shared_ptr tae_first_stage; + std::shared_ptr control_net; + std::shared_ptr pmid_model; + std::shared_ptr pmid_lora; + std::shared_ptr pmid_id_embeds; -/*=============================================== StableDiffusionGGML ================================================*/ + std::string taesd_path; + bool use_tiny_autoencoder = false; + bool vae_tiling = false; + bool stacked_id = false; -class StableDiffusionGGML { - public: - ggml_context* clip_params_ctx = NULL; - ggml_context* unet_params_ctx = NULL; - ggml_context* vae_params_ctx = NULL; + bool is_using_v_parameterization = false; + bool is_using_edm_v_parameterization = false; - bool dynamic = true; - bool vae_decode_only = false; - bool free_params_immediately = false; - int32_t ftype = 1; - int n_threads = -1; - float scale_factor = 
0.18215f; - size_t max_mem_size = 0; - size_t curr_params_mem_size = 0; - size_t max_params_mem_size = 0; - size_t max_rt_mem_size = 0; + std::map tensors; - FrozenCLIPEmbedderWithCustomWords cond_stage_model; - UNetModel diffusion_model; - AutoEncoderKL first_stage_model; + std::string lora_model_dir; + // lora_name => multiplier + std::unordered_map curr_lora_state; - CompVisDenoiser denoiser; + std::shared_ptr denoiser = std::make_shared(); StableDiffusionGGML() = default; StableDiffusionGGML(int n_threads, bool vae_decode_only, - bool free_params_immediately) + bool free_params_immediately, + std::string lora_model_dir, + rng_type_t rng_type) : n_threads(n_threads), vae_decode_only(vae_decode_only), - free_params_immediately(free_params_immediately) { - first_stage_model.decode_only = vae_decode_only; + free_params_immediately(free_params_immediately), + lora_model_dir(lora_model_dir) { + if (rng_type == STD_DEFAULT_RNG) { + rng = std::make_shared(); + } else if (rng_type == CUDA_RNG) { + rng = std::make_shared(); + } } ~StableDiffusionGGML() { - if (clip_params_ctx != NULL) { - ggml_free(clip_params_ctx); - clip_params_ctx = NULL; + if (clip_backend != backend) { + ggml_backend_free(clip_backend); } - if (unet_params_ctx != NULL) { - ggml_free(unet_params_ctx); - unet_params_ctx = NULL; + if (control_net_backend != backend) { + ggml_backend_free(control_net_backend); } - if (vae_params_ctx != NULL) { - ggml_free(vae_params_ctx); - vae_params_ctx = NULL; + if (vae_backend != backend) { + ggml_backend_free(vae_backend); } + ggml_backend_free(backend); } - bool load_from_file(const std::string& file_path) { - LOG_INFO("loading model from '%s'", file_path.c_str()); - - std::ifstream file(file_path, std::ios::binary); - if (!file.is_open()) { - LOG_ERROR("failed to open '%s'", file_path.c_str()); - return false; + bool load_from_file(const std::string& model_path, + const std::string& clip_l_path, + const std::string& clip_g_path, + const std::string& t5xxl_path, + 
const std::string& diffusion_model_path, + const std::string& vae_path, + const std::string control_net_path, + const std::string embeddings_path, + const std::string id_embeddings_path, + const std::string& taesd_path, + bool vae_tiling_, + ggml_type wtype, + schedule_t schedule, + bool clip_on_cpu, + bool control_net_cpu, + bool vae_on_cpu, + bool diffusion_flash_attn, + bool chroma_use_dit_mask, + bool chroma_use_t5_mask, + int chroma_t5_mask_pad) { + use_tiny_autoencoder = taesd_path.size() > 0; +#ifdef SD_USE_CUDA + LOG_DEBUG("Using CUDA backend"); + backend = ggml_backend_cuda_init(0); +#endif +#ifdef SD_USE_METAL + LOG_DEBUG("Using Metal backend"); + ggml_log_set(ggml_log_callback_default, nullptr); + backend = ggml_backend_metal_init(); +#endif +#ifdef SD_USE_VULKAN + LOG_DEBUG("Using Vulkan backend"); + for (int device = 0; device < ggml_backend_vk_get_device_count(); ++device) { + backend = ggml_backend_vk_init(device); } - - LOG_DEBUG("verifying magic"); - // verify magic - { - uint32_t magic; - file.read(reinterpret_cast(&magic), sizeof(magic)); - if (magic != GGML_FILE_MAGIC) { - LOG_ERROR("invalid model file '%s' (bad magic)", file_path.c_str()); - return false; - } + if (!backend) { + LOG_WARN("Failed to initialize Vulkan backend"); + } +#endif +#ifdef SD_USE_OPENCL + LOG_DEBUG("Using OpenCL backend"); + // ggml_log_set(ggml_log_callback_default, nullptr); // Optional ggml logs + backend = ggml_backend_opencl_init(); + if (!backend) { + LOG_WARN("Failed to initialize OpenCL backend"); } +#endif +#ifdef SD_USE_SYCL + LOG_DEBUG("Using SYCL backend"); + backend = ggml_backend_sycl_init(0); +#endif - LOG_DEBUG("loading hparams"); - // load hparams - file.read(reinterpret_cast(&ftype), sizeof(ftype)); - // for the big tensors, we have the option to store the data in 16-bit floats or quantized - // in order to save memory and also to speed up the computation - ggml_type wtype = ggml_ftype_to_ggml_type((ggml_ftype)(ftype)); - LOG_INFO("ftype: %s", 
ggml_type_name(wtype)); - if (wtype == GGML_TYPE_COUNT) { - LOG_ERROR("invalid model file '%s' (bad ftype value %d)", file_path.c_str(), ftype); - return false; + if (!backend) { + LOG_DEBUG("Using CPU backend"); + backend = ggml_backend_cpu_init(); } - LOG_DEBUG("loading vocab"); - // load vocab - { - int32_t n_vocab = 0; - file.read(reinterpret_cast(&n_vocab), sizeof(n_vocab)); + ModelLoader model_loader; - if (n_vocab != cond_stage_model.text_model.vocab_size) { - LOG_ERROR("invalid model file '%s' (bad vocab size %d != %d)", - file_path.c_str(), n_vocab, cond_stage_model.text_model.vocab_size); - return false; - } + vae_tiling = vae_tiling_; - std::string word; - std::vector buf(128); + if (model_path.size() > 0) { + LOG_INFO("loading model from '%s'", model_path.c_str()); + if (!model_loader.init_from_file(model_path)) { + LOG_ERROR("init model loader from file failed: '%s'", model_path.c_str()); + } + } - for (int i = 0; i < n_vocab; i++) { - uint32_t len; - file.read((char*)&len, sizeof(len)); + if (diffusion_model_path.size() > 0) { + LOG_INFO("loading diffusion model from '%s'", diffusion_model_path.c_str()); + if (!model_loader.init_from_file(diffusion_model_path, "model.diffusion_model.")) { + LOG_WARN("loading diffusion model from '%s' failed", diffusion_model_path.c_str()); + } + } - buf.resize(len); - file.read((char*)buf.data(), len); - word.assign(buf.data(), len); + bool is_unet = model_loader.model_is_unet(); - cond_stage_model.tokenizer.add_token(word, i); + if (clip_l_path.size() > 0) { + LOG_INFO("loading clip_l from '%s'", clip_l_path.c_str()); + if (!model_loader.init_from_file(clip_l_path, is_unet ? "cond_stage_model.transformer." 
: "text_encoders.clip_l.transformer.")) { + LOG_WARN("loading clip_l from '%s' failed", clip_l_path.c_str()); } } - // create the ggml context for network params - LOG_DEBUG("ggml tensor size = %d bytes", (int)sizeof(ggml_tensor)); - { - // cond_stage_model(FrozenCLIPEmbedder) - double ctx_size = 1 * 1024 * 1024; // 1 MB, for padding - ctx_size += cond_stage_model.text_model.compute_params_mem_size(wtype); - LOG_DEBUG("clip params ctx size = % 6.2f MB", ctx_size / (1024.0 * 1024.0)); - - struct ggml_init_params params; - params.mem_size = static_cast(ctx_size); - params.mem_buffer = NULL; - params.no_alloc = false; - params.dynamic = false; - - clip_params_ctx = ggml_init(params); - if (!clip_params_ctx) { - LOG_ERROR("ggml_init() failed"); - return false; + if (clip_g_path.size() > 0) { + LOG_INFO("loading clip_g from '%s'", clip_g_path.c_str()); + if (!model_loader.init_from_file(clip_g_path, is_unet ? "cond_stage_model.1.transformer." : "text_encoders.clip_g.transformer.")) { + LOG_WARN("loading clip_g from '%s' failed", clip_g_path.c_str()); } } - { - // diffusion_model(UNetModel) - double ctx_size = 1 * 1024 * 1024; // 1 MB, for padding - ctx_size += diffusion_model.compute_params_mem_size(wtype); - LOG_DEBUG("unet params ctx size = % 6.2f MB", ctx_size / (1024.0 * 1024.0)); - - struct ggml_init_params params; - params.mem_size = static_cast(ctx_size); - params.mem_buffer = NULL; - params.no_alloc = false; - params.dynamic = false; - - unet_params_ctx = ggml_init(params); - if (!unet_params_ctx) { - LOG_ERROR("ggml_init() failed"); - ggml_free(clip_params_ctx); - clip_params_ctx = NULL; - return false; + if (t5xxl_path.size() > 0) { + LOG_INFO("loading t5xxl from '%s'", t5xxl_path.c_str()); + if (!model_loader.init_from_file(t5xxl_path, "text_encoders.t5xxl.transformer.")) { + LOG_WARN("loading t5xxl from '%s' failed", t5xxl_path.c_str()); } } - { - // first_stage_model(AutoEncoderKL) - double ctx_size = 1 * 1024 * 1024; // 1 MB, for padding - ctx_size += 
first_stage_model.compute_params_mem_size(wtype); - LOG_DEBUG("vae params ctx size = % 6.2f MB", ctx_size / (1024.0 * 1024.0)); - - struct ggml_init_params params; - params.mem_size = static_cast(ctx_size); - params.mem_buffer = NULL; - params.no_alloc = false; - params.dynamic = false; - - vae_params_ctx = ggml_init(params); - if (!vae_params_ctx) { - LOG_ERROR("ggml_init() failed"); - ggml_free(clip_params_ctx); - clip_params_ctx = NULL; - ggml_free(unet_params_ctx); - unet_params_ctx = NULL; - return false; + if (vae_path.size() > 0) { + LOG_INFO("loading vae from '%s'", vae_path.c_str()); + if (!model_loader.init_from_file(vae_path, "vae.")) { + LOG_WARN("loading vae from '%s' failed", vae_path.c_str()); } } - std::map tensors; - - LOG_DEBUG("preparing memory for the weights"); - // prepare memory for the weights - { - // cond_stage_model(FrozenCLIPEmbedder) - cond_stage_model.text_model.init_params(clip_params_ctx, wtype); - cond_stage_model.text_model.map_by_name(tensors, "cond_stage_model.transformer.text_model."); + version = model_loader.get_sd_version(); + if (version == VERSION_COUNT) { + LOG_ERROR("get sd version from file failed: '%s'", model_path.c_str()); + return false; + } - // diffusion_model(UNetModel) - diffusion_model.init_params(unet_params_ctx, wtype); - diffusion_model.map_by_name(tensors, "model.diffusion_model."); + LOG_INFO("Version: %s ", model_version_to_str[version]); + if (wtype == GGML_TYPE_COUNT) { + model_wtype = model_loader.get_sd_wtype(); + if (model_wtype == GGML_TYPE_COUNT) { + model_wtype = GGML_TYPE_F32; + LOG_WARN("can not get mode wtype frome weight, use f32"); + } + conditioner_wtype = model_loader.get_conditioner_wtype(); + if (conditioner_wtype == GGML_TYPE_COUNT) { + conditioner_wtype = wtype; + } + diffusion_model_wtype = model_loader.get_diffusion_model_wtype(); + if (diffusion_model_wtype == GGML_TYPE_COUNT) { + diffusion_model_wtype = wtype; + } + vae_wtype = model_loader.get_vae_wtype(); - // 
firest_stage_model(AutoEncoderKL) - first_stage_model.init_params(vae_params_ctx, wtype); - first_stage_model.map_by_name(tensors, "first_stage_model."); + if (vae_wtype == GGML_TYPE_COUNT) { + vae_wtype = wtype; + } + } else { + model_wtype = wtype; + conditioner_wtype = wtype; + diffusion_model_wtype = wtype; + vae_wtype = wtype; + model_loader.set_wtype_override(wtype); } - LOG_DEBUG("loading weights"); - std::set tensor_names_in_file; - int64_t t0 = ggml_time_ms(); - // load weights - { - int n_tensors = 0; - size_t total_size = 0; + if (sd_version_is_sdxl(version)) { + vae_wtype = GGML_TYPE_F32; + model_loader.set_wtype_override(GGML_TYPE_F32, "vae."); + } - while (true) { - int32_t n_dims; - int32_t length; - int32_t ttype; + LOG_INFO("Weight type: %s", model_wtype != GGML_TYPE_COUNT ? ggml_type_name(model_wtype) : "??"); + LOG_INFO("Conditioner weight type: %s", conditioner_wtype != GGML_TYPE_COUNT ? ggml_type_name(conditioner_wtype) : "??"); + LOG_INFO("Diffusion model weight type: %s", diffusion_model_wtype != GGML_TYPE_COUNT ? ggml_type_name(diffusion_model_wtype) : "??"); + LOG_INFO("VAE weight type: %s", vae_wtype != GGML_TYPE_COUNT ? ggml_type_name(vae_wtype) : "??"); - file.read(reinterpret_cast(&n_dims), sizeof(n_dims)); - file.read(reinterpret_cast(&length), sizeof(length)); - file.read(reinterpret_cast(&ttype), sizeof(ttype)); + LOG_DEBUG("ggml tensor size = %d bytes", (int)sizeof(ggml_tensor)); - if (file.eof()) { - break; + if (sd_version_is_sdxl(version)) { + scale_factor = 0.13025f; + if (vae_path.size() == 0 && taesd_path.size() == 0) { + LOG_WARN( + "!!!It looks like you are using SDXL model. " + "If you find that the generated images are completely black, " + "try specifying SDXL VAE FP16 Fix with the --vae parameter. 
" + "You can find it here: https://huggingface.co/madebyollin/sdxl-vae-fp16-fix/blob/main/sdxl_vae.safetensors"); + } + } else if (sd_version_is_sd3(version)) { + scale_factor = 1.5305f; + } else if (sd_version_is_flux(version)) { + scale_factor = 0.3611; + // TODO: shift_factor + } + + if (version == VERSION_SVD) { + clip_vision = std::make_shared(backend, model_loader.tensor_storages_types); + clip_vision->alloc_params_buffer(); + clip_vision->get_param_tensors(tensors); + + diffusion_model = std::make_shared(backend, model_loader.tensor_storages_types, version); + diffusion_model->alloc_params_buffer(); + diffusion_model->get_param_tensors(tensors); + + first_stage_model = std::make_shared(backend, model_loader.tensor_storages_types, "first_stage_model", vae_decode_only, true, version); + LOG_DEBUG("vae_decode_only %d", vae_decode_only); + first_stage_model->alloc_params_buffer(); + first_stage_model->get_param_tensors(tensors, "first_stage_model"); + } else { + clip_backend = backend; + bool use_t5xxl = false; + if (sd_version_is_dit(version)) { + use_t5xxl = true; + } + if (!ggml_backend_is_cpu(backend) && use_t5xxl && conditioner_wtype != GGML_TYPE_F32) { + clip_on_cpu = true; + LOG_INFO("set clip_on_cpu to true"); + } + if (clip_on_cpu && !ggml_backend_is_cpu(backend)) { + LOG_INFO("CLIP: Using CPU backend"); + clip_backend = ggml_backend_cpu_init(); + } + if (diffusion_flash_attn) { + LOG_INFO("Using flash attention in the diffusion model"); + } + if (sd_version_is_sd3(version)) { + if (diffusion_flash_attn) { + LOG_WARN("flash attention in this diffusion model is currently unsupported!"); } - - int32_t nelements = 1; - int32_t ne[4] = {1, 1, 1, 1}; - for (int i = 0; i < n_dims; ++i) { - file.read(reinterpret_cast(&ne[i]), sizeof(ne[i])); - nelements *= ne[i]; + cond_stage_model = std::make_shared(clip_backend, model_loader.tensor_storages_types); + diffusion_model = std::make_shared(backend, model_loader.tensor_storages_types); + } else if 
(sd_version_is_flux(version)) { + bool is_chroma = false; + for (auto pair : model_loader.tensor_storages_types) { + if (pair.first.find("distilled_guidance_layer.in_proj.weight") != std::string::npos) { + is_chroma = true; + break; + } } + if (is_chroma) { + cond_stage_model = std::make_shared(clip_backend, model_loader.tensor_storages_types, -1, chroma_use_t5_mask, chroma_t5_mask_pad); + } else { + cond_stage_model = std::make_shared(clip_backend, model_loader.tensor_storages_types); + } + diffusion_model = std::make_shared(backend, model_loader.tensor_storages_types, version, diffusion_flash_attn, chroma_use_dit_mask); + } else { + if (id_embeddings_path.find("v2") != std::string::npos) { + cond_stage_model = std::make_shared(clip_backend, model_loader.tensor_storages_types, embeddings_path, version, PM_VERSION_2); + } else { + cond_stage_model = std::make_shared(clip_backend, model_loader.tensor_storages_types, embeddings_path, version); + } + diffusion_model = std::make_shared(backend, model_loader.tensor_storages_types, version, diffusion_flash_attn); + } - std::string name(length, 0); - file.read(&name[0], length); - - tensor_names_in_file.insert(std::string(name.data())); + cond_stage_model->alloc_params_buffer(); + cond_stage_model->get_param_tensors(tensors); - if (std::string(name.data()) == "alphas_cumprod") { - file.read(reinterpret_cast(denoiser.alphas_cumprod), - nelements * ggml_type_size((ggml_type)ttype)); - for (int i = 0; i < 1000; i++) { - denoiser.sigmas[i] = std::sqrt((1 - denoiser.alphas_cumprod[i]) / denoiser.alphas_cumprod[i]); - denoiser.log_sigmas[i] = std::log(denoiser.sigmas[i]); - } - continue; - } + diffusion_model->alloc_params_buffer(); + diffusion_model->get_param_tensors(tensors); - struct ggml_tensor* tensor; - if (tensors.find(name.data()) != tensors.end()) { - tensor = tensors[name.data()]; + if (!use_tiny_autoencoder) { + if (vae_on_cpu && !ggml_backend_is_cpu(backend)) { + LOG_INFO("VAE Autoencoder: Using CPU backend"); + 
vae_backend = ggml_backend_cpu_init(); } else { - if (name.find("quant") == std::string::npos && name.find("first_stage_model.encoder.") == std::string::npos) { - LOG_WARN("unknown tensor '%s' in model file", name.data()); - } else { - if (!vae_decode_only) { - LOG_WARN("unknown tensor '%s' in model file", name.data()); - return false; - } - } - file.ignore(nelements * ggml_type_size((ggml_type)ttype)); - continue; + vae_backend = backend; } + first_stage_model = std::make_shared(vae_backend, model_loader.tensor_storages_types, "first_stage_model", vae_decode_only, false, version); + first_stage_model->alloc_params_buffer(); + first_stage_model->get_param_tensors(tensors, "first_stage_model"); + } else { + tae_first_stage = std::make_shared(backend, model_loader.tensor_storages_types, "decoder.layers", vae_decode_only, version); + } + // first_stage_model->get_param_tensors(tensors, "first_stage_model."); - if (tensor->ne[0] != ne[0] || tensor->ne[1] != ne[1] || tensor->ne[2] != ne[2] || tensor->ne[3] != ne[3]) { - LOG_ERROR( - "tensor '%s' has wrong shape in model file: " - "got [%d, %d, %d, %d], expected [%d, %d, %d, %d]", - name.data(), - ne[0], ne[1], ne[2], ne[3], - (int)tensor->ne[0], (int)tensor->ne[1], (int)tensor->ne[2], (int)tensor->ne[3]); - return false; + if (control_net_path.size() > 0) { + ggml_backend_t controlnet_backend = NULL; + if (control_net_cpu && !ggml_backend_is_cpu(backend)) { + LOG_DEBUG("ControlNet: Using CPU backend"); + controlnet_backend = ggml_backend_cpu_init(); + } else { + controlnet_backend = backend; } + control_net = std::make_shared(controlnet_backend, model_loader.tensor_storages_types, version); + } - if (ggml_nelements(tensor) != nelements) { - LOG_ERROR( - "tensor '%s' has wrong number of elements in model file: " - "got %u, expert %zu", - name.data(), nelements, ggml_nelements(tensor)); + if (id_embeddings_path.find("v2") != std::string::npos) { + pmid_model = std::make_shared(backend, model_loader.tensor_storages_types, 
"pmid", version, PM_VERSION_2); + LOG_INFO("using PhotoMaker Version 2"); + } else { + pmid_model = std::make_shared(backend, model_loader.tensor_storages_types, "pmid", version); + } + if (id_embeddings_path.size() > 0) { + pmid_lora = std::make_shared(backend, id_embeddings_path, ""); + if (!pmid_lora->load_from_file(true)) { + LOG_WARN("load photomaker lora tensors from %s failed", id_embeddings_path.c_str()); return false; } - - if (tensor->type != ttype) { - LOG_ERROR("tensor '%s' has wrong type in model file: got %s, expect %s", - name.data(), ggml_type_name(ggml_type(ttype)), ggml_type_name(tensor->type)); - return false; + LOG_INFO("loading stacked ID embedding (PHOTOMAKER) model file from '%s'", id_embeddings_path.c_str()); + if (!model_loader.init_from_file(id_embeddings_path, "pmid.")) { + LOG_WARN("loading stacked ID embedding from '%s' failed", id_embeddings_path.c_str()); + } else { + stacked_id = true; } - - const size_t num_bytes = nelements / ggml_blck_size(ggml_type(ttype)) * ggml_type_size(ggml_type(ttype)); - - file.read(reinterpret_cast(tensor->data), num_bytes); - - total_size += ggml_nbytes(tensor); } - bool some_tensor_not_init = false; - for (auto pair : tensors) { - if (tensor_names_in_file.find(pair.first) == tensor_names_in_file.end()) { - LOG_ERROR("tensor '%s' not in model file", pair.first.c_str()); - some_tensor_not_init = true; + if (stacked_id) { + if (!pmid_model->alloc_params_buffer()) { + LOG_ERROR(" pmid model params buffer allocation failed"); + return false; } + pmid_model->get_param_tensors(tensors, "pmid"); } - if (tensor_names_in_file.find("alphas_cumprod") == tensor_names_in_file.end()) { - LOG_ERROR("tensor alphas_cumprod not in model file"); - some_tensor_not_init = true; - } - if (some_tensor_not_init) { - file.close(); - return false; - } - LOG_DEBUG("model size = %.2fMB", total_size / 1024.0 / 1024.0); - } - max_params_mem_size = ggml_used_mem(clip_params_ctx) + ggml_used_mem(unet_params_ctx) + 
ggml_used_mem(vae_params_ctx); - max_mem_size = max_params_mem_size; - curr_params_mem_size = max_params_mem_size; - LOG_INFO("total params size = %.2fMB (clip %.2fMB, unet %.2fMB, vae %.2fMB)", - max_params_mem_size / 1024.0 / 1024.0, - ggml_used_mem(clip_params_ctx) / 1024.0 / 1024.0, - ggml_used_mem(unet_params_ctx) / 1024.0 / 1024.0, - ggml_used_mem(vae_params_ctx) / 1024.0 / 1024.0); - int64_t t1 = ggml_time_ms(); - LOG_INFO("loading model from '%s' completed, taking %.2fs", file_path.c_str(), (t1 - t0) * 1.0f / 1000); - file.close(); - return true; - } - - ggml_tensor* get_learned_condition(ggml_context* res_ctx, const std::string& text) { - auto tokens_and_weights = cond_stage_model.tokenize(text, - cond_stage_model.text_model.max_position_embeddings, - true); - std::vector& tokens = tokens_and_weights.first; - std::vector& weights = tokens_and_weights.second; - size_t ctx_size = 1 * 1024 * 1024; // 1MB - // calculate the amount of memory required - { - struct ggml_init_params params; - params.mem_size = ctx_size; - params.mem_buffer = NULL; - params.no_alloc = true; - params.dynamic = dynamic; - - struct ggml_context* ctx = ggml_init(params); - if (!ctx) { - LOG_ERROR("ggml_init() failed"); - return NULL; - } - - ggml_set_dynamic(ctx, false); - struct ggml_tensor* input_ids = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, tokens.size()); - ggml_set_dynamic(ctx, params.dynamic); - - struct ggml_tensor* hidden_states = cond_stage_model.text_model.forward(ctx, input_ids); - - struct ggml_cgraph cond_graph = ggml_build_forward(hidden_states); - struct ggml_cplan cplan = ggml_graph_plan(&cond_graph, n_threads); - ctx_size += cplan.work_size; - - ctx_size += ggml_used_mem(ctx) + ggml_used_mem_of_data(ctx); - LOG_DEBUG("condition context need %.2fMB static memory, with work_size needing %.2fMB", - ctx_size * 1.0f / 1024 / 1024, - cplan.work_size * 1.0f / 1024 / 1024); - ggml_free(ctx); } - // allocate the required memory and compute forward struct ggml_init_params params; 
- params.mem_size = ctx_size; + params.mem_size = static_cast(10 * 1024) * 1024; // 10M params.mem_buffer = NULL; - params.no_alloc = false; - params.dynamic = dynamic; - - struct ggml_context* ctx = ggml_init(params); - if (!ctx) { - LOG_ERROR("ggml_init() failed"); - return NULL; - } + params.no_alloc = false; + // LOG_DEBUG("mem_size %u ", params.mem_size); + struct ggml_context* ctx = ggml_init(params); // for alphas_cumprod and is_using_v_parameterization check + GGML_ASSERT(ctx != NULL); + ggml_tensor* alphas_cumprod_tensor = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, TIMESTEPS); + calculate_alphas_cumprod((float*)alphas_cumprod_tensor->data); - ggml_set_dynamic(ctx, false); - struct ggml_tensor* input_ids = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, tokens.size()); - ggml_set_dynamic(ctx, params.dynamic); + // load weights + LOG_DEBUG("loading weights"); - struct ggml_tensor* hidden_states = cond_stage_model.text_model.forward(ctx, input_ids); - struct ggml_cgraph cond_graph = ggml_build_forward(hidden_states); - LOG_DEBUG("building condition graph completed: %d nodes, %d leafs", - cond_graph.n_nodes, cond_graph.n_leafs); + int64_t t0 = ggml_time_ms(); - memcpy(input_ids->data, tokens.data(), tokens.size() * ggml_element_size(input_ids)); + std::set ignore_tensors; + tensors["alphas_cumprod"] = alphas_cumprod_tensor; + if (use_tiny_autoencoder) { + ignore_tensors.insert("first_stage_model."); + } + if (stacked_id) { + ignore_tensors.insert("lora."); + } - int64_t t0 = ggml_time_ms(); - ggml_graph_compute_with_ctx(ctx, &cond_graph, n_threads); - int64_t t1 = ggml_time_ms(); - LOG_DEBUG("computing condition graph completed, taking %.2fs", (t1 - t0) * 1.0f / 1000); + if (vae_decode_only) { + ignore_tensors.insert("first_stage_model.encoder"); + ignore_tensors.insert("first_stage_model.quant"); + } + if (version == VERSION_SVD) { + ignore_tensors.insert("conditioner.embedders.3"); + } + bool success = model_loader.load_tensors(tensors, backend, ignore_tensors); + if 
(!success) { + LOG_ERROR("load tensors from model loader failed"); + ggml_free(ctx); + return false; + } - ggml_tensor* result = ggml_dup_tensor(res_ctx, hidden_states); // [N, n_token, hidden_size] + // LOG_DEBUG("model size = %.2fMB", total_size / 1024.0 / 1024.0); - { - int64_t nelements = ggml_nelements(hidden_states); - float original_mean = 0.f; - float new_mean = 0.f; - float* vec = (float*)hidden_states->data; - for (int i = 0; i < nelements; i++) { - original_mean += vec[i] / nelements * 1.0f; + if (version == VERSION_SVD) { + // diffusion_model->test(); + // first_stage_model->test(); + // return false; + } else { + size_t clip_params_mem_size = cond_stage_model->get_params_buffer_size(); + size_t unet_params_mem_size = diffusion_model->get_params_buffer_size(); + size_t vae_params_mem_size = 0; + if (!use_tiny_autoencoder) { + vae_params_mem_size = first_stage_model->get_params_buffer_size(); + } else { + if (!tae_first_stage->load_from_file(taesd_path)) { + return false; + } + vae_params_mem_size = tae_first_stage->get_params_buffer_size(); } - - for (int i2 = 0; i2 < hidden_states->ne[2]; i2++) { - for (int i1 = 0; i1 < hidden_states->ne[1]; i1++) { - for (int i0 = 0; i0 < hidden_states->ne[0]; i0++) { - float value = ggml_tensor_get_f32(hidden_states, i0, i1, i2); - value *= weights[i1]; - ggml_tensor_set_f32(result, value, i0, i1, i2); - } + size_t control_net_params_mem_size = 0; + if (control_net) { + if (!control_net->load_from_file(control_net_path)) { + return false; } + control_net_params_mem_size = control_net->get_params_buffer_size(); + } + size_t pmid_params_mem_size = 0; + if (stacked_id) { + pmid_params_mem_size = pmid_model->get_params_buffer_size(); } - vec = (float*)result->data; - for (int i = 0; i < nelements; i++) { - new_mean += vec[i] / nelements * 1.0f; + size_t total_params_ram_size = 0; + size_t total_params_vram_size = 0; + if (ggml_backend_is_cpu(clip_backend)) { + total_params_ram_size += clip_params_mem_size + 
pmid_params_mem_size; + } else { + total_params_vram_size += clip_params_mem_size + pmid_params_mem_size; } - for (int i = 0; i < nelements; i++) { - vec[i] = vec[i] * (original_mean / new_mean); + if (ggml_backend_is_cpu(backend)) { + total_params_ram_size += unet_params_mem_size; + } else { + total_params_vram_size += unet_params_mem_size; } - } - // print_ggml_tensor(result); + if (ggml_backend_is_cpu(vae_backend)) { + total_params_ram_size += vae_params_mem_size; + } else { + total_params_vram_size += vae_params_mem_size; + } - size_t rt_mem_size = ctx_size + ggml_curr_max_dynamic_size(); - if (rt_mem_size > max_rt_mem_size) { - max_rt_mem_size = rt_mem_size; - } - size_t graph_mem_size = ggml_used_mem(clip_params_ctx) + rt_mem_size; + if (ggml_backend_is_cpu(control_net_backend)) { + total_params_ram_size += control_net_params_mem_size; + } else { + total_params_vram_size += control_net_params_mem_size; + } - size_t curr_mem_size = curr_params_mem_size + rt_mem_size; - if (curr_mem_size > max_mem_size) { - max_mem_size = curr_mem_size; + size_t total_params_size = total_params_ram_size + total_params_vram_size; + LOG_INFO( + "total params memory size = %.2fMB (VRAM %.2fMB, RAM %.2fMB): " + "clip %.2fMB(%s), unet %.2fMB(%s), vae %.2fMB(%s), controlnet %.2fMB(%s), pmid %.2fMB(%s)", + total_params_size / 1024.0 / 1024.0, + total_params_vram_size / 1024.0 / 1024.0, + total_params_ram_size / 1024.0 / 1024.0, + clip_params_mem_size / 1024.0 / 1024.0, + ggml_backend_is_cpu(clip_backend) ? "RAM" : "VRAM", + unet_params_mem_size / 1024.0 / 1024.0, + ggml_backend_is_cpu(backend) ? "RAM" : "VRAM", + vae_params_mem_size / 1024.0 / 1024.0, + ggml_backend_is_cpu(vae_backend) ? "RAM" : "VRAM", + control_net_params_mem_size / 1024.0 / 1024.0, + ggml_backend_is_cpu(control_net_backend) ? "RAM" : "VRAM", + pmid_params_mem_size / 1024.0 / 1024.0, + ggml_backend_is_cpu(clip_backend) ? 
"RAM" : "VRAM"); } - LOG_INFO( - "condition graph use %.2fMB of memory: params %.2fMB, " - "runtime %.2fMB (static %.2fMB, dynamic %.2fMB)", - graph_mem_size * 1.0f / 1024 / 1024, - ggml_used_mem(clip_params_ctx) * 1.0f / 1024 / 1024, - rt_mem_size * 1.0f / 1024 / 1024, - ctx_size * 1.0f / 1024 / 1024, - ggml_curr_max_dynamic_size() * 1.0f / 1024 / 1024); - - LOG_DEBUG("%zu bytes of dynamic memory has not been released yet", ggml_dynamic_size()); - - ggml_free(ctx); - - return result; // [1, 77, 768] - } + int64_t t1 = ggml_time_ms(); + LOG_INFO("loading model from '%s' completed, taking %.2fs", model_path.c_str(), (t1 - t0) * 1.0f / 1000); - ggml_tensor* sample(ggml_context* res_ctx, - ggml_tensor* x_t, - ggml_tensor* c, - ggml_tensor* uc, - float cfg_scale, - SampleMethod method, - const std::vector& sigmas) { - size_t steps = sigmas.size() - 1; - // x_t = load_tensor_from_file(res_ctx, "./rand0.bin"); - // print_ggml_tensor(x_t); - struct ggml_tensor* x_out = ggml_dup_tensor(res_ctx, x_t); - copy_ggml_tensor(x_out, x_t); + // check is_using_v_parameterization_for_sd2 - size_t ctx_size = 1 * 1024 * 1024; // 1MB - // calculate the amount of memory required - { - struct ggml_init_params params; - params.mem_size = ctx_size; - params.mem_buffer = NULL; - params.no_alloc = true; - params.dynamic = dynamic; - - struct ggml_context* ctx = ggml_init(params); - if (!ctx) { - LOG_ERROR("ggml_init() failed"); - return NULL; + if (sd_version_is_sd2(version)) { + if (is_using_v_parameterization_for_sd2(ctx, sd_version_is_inpaint(version))) { + is_using_v_parameterization = true; } - - ggml_set_dynamic(ctx, false); - struct ggml_tensor* x = ggml_dup_tensor(ctx, x_t); - struct ggml_tensor* context = ggml_dup_tensor(ctx, c); - struct ggml_tensor* timesteps = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1); // [N, ] - struct ggml_tensor* t_emb = new_timestep_embedding(ctx, timesteps, diffusion_model.model_channels); // [N, model_channels] - ggml_set_dynamic(ctx, params.dynamic); - - 
struct ggml_tensor* eps = diffusion_model.forward(ctx, x, NULL, context, t_emb); - ctx_size += ggml_used_mem(ctx) + ggml_used_mem_of_data(ctx); - - struct ggml_cgraph diffusion_graph = ggml_build_forward(eps); - struct ggml_cplan cplan = ggml_graph_plan(&diffusion_graph, n_threads); - - ctx_size += cplan.work_size; - LOG_DEBUG("diffusion context need %.2fMB static memory, with work_size needing %.2fMB", - ctx_size * 1.0f / 1024 / 1024, - cplan.work_size * 1.0f / 1024 / 1024); - - ggml_free(ctx); - } - - struct ggml_init_params params; - params.mem_size = ctx_size; - params.mem_buffer = NULL; - params.no_alloc = false; - params.dynamic = dynamic; - - struct ggml_context* ctx = ggml_init(params); - if (!ctx) { - LOG_ERROR("ggml_init() failed"); - return NULL; + } else if (sd_version_is_sdxl(version)) { + if (model_loader.tensor_storages_types.find("edm_vpred.sigma_max") != model_loader.tensor_storages_types.end()) { + // CosXL models + // TODO: get sigma_min and sigma_max values from file + is_using_edm_v_parameterization = true; + } + if (model_loader.tensor_storages_types.find("v_pred") != model_loader.tensor_storages_types.end()) { + is_using_v_parameterization = true; + } + } else if (version == VERSION_SVD) { + // TODO: V_PREDICTION_EDM + is_using_v_parameterization = true; } - ggml_set_dynamic(ctx, false); - struct ggml_tensor* x = ggml_dup_tensor(ctx, x_t); - struct ggml_tensor* context = ggml_dup_tensor(ctx, c); - struct ggml_tensor* timesteps = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1); // [N, ] - struct ggml_tensor* t_emb = new_timestep_embedding(ctx, timesteps, diffusion_model.model_channels); // [N, model_channels] - ggml_set_dynamic(ctx, params.dynamic); - - struct ggml_tensor* eps = diffusion_model.forward(ctx, x, NULL, context, t_emb); - ggml_hold_dynamic_tensor(eps); - - struct ggml_cgraph diffusion_graph = ggml_build_forward(eps); - struct ggml_cplan cplan = ggml_graph_plan(&diffusion_graph, n_threads); - - ggml_set_dynamic(ctx, false); - struct 
ggml_tensor* buf = ggml_new_tensor_1d(ctx, GGML_TYPE_I8, cplan.work_size); - ggml_set_dynamic(ctx, params.dynamic); - - cplan.work_data = (uint8_t*)buf->data; - - // sample_euler_ancestral - { - ggml_set_dynamic(ctx, false); - struct ggml_tensor* eps_cond = NULL; - struct ggml_tensor* eps_uncond = NULL; - struct ggml_tensor* noise = ggml_dup_tensor(ctx, x_out); - if (cfg_scale != 1.0f && uc != NULL) { - eps_uncond = ggml_dup_tensor(ctx, x_out); - } - struct ggml_tensor* d = ggml_dup_tensor(ctx, x_out); - ggml_set_dynamic(ctx, params.dynamic); - - // x_out = x_out * sigmas[0] - { - float* vec = (float*)x_out->data; - for (int i = 0; i < ggml_nelements(x_out); i++) { - vec[i] = vec[i] * sigmas[0]; + if (sd_version_is_sd3(version)) { + LOG_INFO("running in FLOW mode"); + denoiser = std::make_shared(); + } else if (sd_version_is_flux(version)) { + LOG_INFO("running in Flux FLOW mode"); + float shift = 1.0f; // TODO: validate + for (auto pair : model_loader.tensor_storages_types) { + if (pair.first.find("model.diffusion_model.guidance_in.in_layer.weight") != std::string::npos) { + shift = 1.15f; + break; } } + denoiser = std::make_shared(shift); + } else if (is_using_v_parameterization) { + LOG_INFO("running in v-prediction mode"); + denoiser = std::make_shared(); + } else if (is_using_edm_v_parameterization) { + LOG_INFO("running in v-prediction EDM mode"); + denoiser = std::make_shared(); + } else { + LOG_INFO("running in eps-prediction mode"); + } - for (int i = 0; i < steps; i++) { - int64_t t0 = ggml_time_ms(); + if (schedule != DEFAULT) { + switch (schedule) { + case DISCRETE: + LOG_INFO("running with discrete schedule"); + denoiser->schedule = std::make_shared(); + break; + case KARRAS: + LOG_INFO("running with Karras schedule"); + denoiser->schedule = std::make_shared(); + break; + case EXPONENTIAL: + LOG_INFO("running exponential schedule"); + denoiser->schedule = std::make_shared(); + break; + case AYS: + LOG_INFO("Running with Align-Your-Steps schedule"); + 
denoiser->schedule = std::make_shared(); + denoiser->schedule->version = version; + break; + case GITS: + LOG_INFO("Running with GITS schedule"); + denoiser->schedule = std::make_shared(); + denoiser->schedule->version = version; + break; + case DEFAULT: + // Don't touch anything. + break; + default: + LOG_ERROR("Unknown schedule %i", schedule); + abort(); + } + } - copy_ggml_tensor(x, x_out); + auto comp_vis_denoiser = std::dynamic_pointer_cast(denoiser); + if (comp_vis_denoiser) { + for (int i = 0; i < TIMESTEPS; i++) { + comp_vis_denoiser->sigmas[i] = std::sqrt((1 - ((float*)alphas_cumprod_tensor->data)[i]) / ((float*)alphas_cumprod_tensor->data)[i]); + comp_vis_denoiser->log_sigmas[i] = std::log(comp_vis_denoiser->sigmas[i]); + } + } - std::pair scaling = denoiser.get_scalings(sigmas[i]); - float c_in = scaling.first; - float c_out = scaling.second; - float t = denoiser.sigma_to_t(sigmas[i]); - ggml_set_f32(timesteps, t); - set_timestep_embedding(timesteps, t_emb, diffusion_model.model_channels); + LOG_DEBUG("finished loaded file"); + ggml_free(ctx); + return true; + } - // x = x * c_in - { - float* vec = (float*)x->data; - for (int i = 0; i < ggml_nelements(x); i++) { - vec[i] = vec[i] * c_in; - } - } + bool is_using_v_parameterization_for_sd2(ggml_context* work_ctx, bool is_inpaint = false) { + struct ggml_tensor* x_t = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, 8, 8, 4, 1); + ggml_set_f32(x_t, 0.5); + struct ggml_tensor* c = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, 1024, 2, 1, 1); + ggml_set_f32(c, 0.5); - /*d = (x - denoised) / sigma - = (-eps_uncond * c_out - cfg_scale * (eps_cond * c_out - eps_uncond * c_out)) / sigma - = eps_uncond + cfg_scale * (eps_cond - eps_uncond)*/ - if (cfg_scale != 1.0 && uc != NULL) { - // uncond - copy_ggml_tensor(context, uc); - ggml_graph_compute(&diffusion_graph, &cplan); - copy_ggml_tensor(eps_uncond, eps); - - // cond - copy_ggml_tensor(context, c); - ggml_graph_compute(&diffusion_graph, &cplan); - - eps_cond = eps; - 
- /*d = (x - denoised) / sigma - = (-eps_uncond * c_out - cfg_scale * (eps_cond * c_out - eps_uncond * c_out)) / sigma - = eps_uncond + cfg_scale * (eps_cond - eps_uncond)*/ - { - float* vec_d = (float*)d->data; - float* vec_eps_uncond = (float*)eps_uncond->data; - float* vec_eps_cond = (float*)eps_cond->data; - - for (int i = 0; i < ggml_nelements(d); i++) { - vec_d[i] = vec_eps_uncond[i] + cfg_scale * (vec_eps_cond[i] - vec_eps_uncond[i]); - } - } - } else { - // cond - copy_ggml_tensor(context, c); - ggml_graph_compute(&diffusion_graph, &cplan); - copy_ggml_tensor(d, eps); - } + struct ggml_tensor* timesteps = ggml_new_tensor_1d(work_ctx, GGML_TYPE_F32, 1); + ggml_set_f32(timesteps, 999); - // get_ancestral_step - float sigma_up = std::min(sigmas[i + 1], - std::sqrt(sigmas[i + 1] * sigmas[i + 1] * (sigmas[i] * sigmas[i] - sigmas[i + 1] * sigmas[i + 1]) / (sigmas[i] * sigmas[i]))); - float sigma_down = std::sqrt(sigmas[i + 1] * sigmas[i + 1] - sigma_up * sigma_up); + struct ggml_tensor* concat = is_inpaint ? 
ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, 8, 8, 5, 1) : NULL; + if (concat != NULL) { + ggml_set_f32(concat, 0); + } - // Euler method - float dt = sigma_down - sigmas[i]; - // x = x + d * dt - { - float* vec_d = (float*)d->data; - float* vec_x = (float*)x_out->data; + int64_t t0 = ggml_time_ms(); + struct ggml_tensor* out = ggml_dup_tensor(work_ctx, x_t); + diffusion_model->compute(n_threads, x_t, timesteps, c, concat, NULL, NULL, {}, -1, {}, 0.f, &out); + diffusion_model->free_compute_buffer(); - for (int i = 0; i < ggml_nelements(x_out); i++) { - vec_x[i] = vec_x[i] + vec_d[i] * dt; - } - } + double result = 0.f; + { + float* vec_x = (float*)x_t->data; + float* vec_out = (float*)out->data; - if (sigmas[i + 1] > 0) { - // x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * sigma_up - ggml_tensor_set_f32_randn(noise); - // noise = load_tensor_from_file(res_ctx, "./rand" + std::to_string(i+1) + ".bin"); - { - float* vec_x = (float*)x_out->data; - float* vec_noise = (float*)noise->data; + int64_t n = ggml_nelements(out); - for (int i = 0; i < ggml_nelements(x_out); i++) { - vec_x[i] = vec_x[i] + vec_noise[i] * sigma_up; - } - } - } - int64_t t1 = ggml_time_ms(); - LOG_INFO("step %d sampling completed, taking %.2fs", i + 1, (t1 - t0) * 1.0f / 1000); - LOG_DEBUG("diffusion graph use %.2fMB runtime memory: static %.2fMB, dynamic %.2fMB", - (ctx_size + ggml_curr_max_dynamic_size()) * 1.0f / 1024 / 1024, - ctx_size * 1.0f / 1024 / 1024, - ggml_curr_max_dynamic_size() * 1.0f / 1024 / 1024); - LOG_DEBUG("%zu bytes of dynamic memory has not been released yet", ggml_dynamic_size()); + for (int i = 0; i < n; i++) { + result += ((double)vec_out[i] - (double)vec_x[i]); } + result /= n; } - - size_t rt_mem_size = ctx_size + ggml_curr_max_dynamic_size(); - if (rt_mem_size > max_rt_mem_size) { - max_rt_mem_size = rt_mem_size; + int64_t t1 = ggml_time_ms(); + LOG_DEBUG("check is_using_v_parameterization_for_sd2, taking %.2fs", (t1 - t0) * 1.0f / 1000); + return result < 
-1; + } + + void apply_lora(const std::string& lora_name, float multiplier) { + int64_t t0 = ggml_time_ms(); + std::string st_file_path = path_join(lora_model_dir, lora_name + ".safetensors"); + std::string ckpt_file_path = path_join(lora_model_dir, lora_name + ".ckpt"); + std::string file_path; + if (file_exists(st_file_path)) { + file_path = st_file_path; + } else if (file_exists(ckpt_file_path)) { + file_path = ckpt_file_path; + } else { + LOG_WARN("can not find %s or %s for lora %s", st_file_path.c_str(), ckpt_file_path.c_str(), lora_name.c_str()); + return; } - size_t graph_mem_size = ggml_used_mem(unet_params_ctx) + rt_mem_size; - - size_t curr_mem_size = curr_params_mem_size + rt_mem_size; - if (curr_mem_size > max_mem_size) { - max_mem_size = curr_mem_size; + LoraModel lora(backend, file_path); + if (!lora.load_from_file()) { + LOG_WARN("load lora tensors from %s failed", file_path.c_str()); + return; } - LOG_INFO( - "diffusion graph use %.2fMB of memory: params %.2fMB, " - "runtime %.2fMB (static %.2fMB, dynamic %.2fMB)", - graph_mem_size * 1.0f / 1024 / 1024, - ggml_used_mem(unet_params_ctx) * 1.0f / 1024 / 1024, - rt_mem_size * 1.0f / 1024 / 1024, - ctx_size * 1.0f / 1024 / 1024, - ggml_curr_max_dynamic_size() * 1.0f / 1024 / 1024); - LOG_DEBUG("%zu bytes of dynamic memory has not been released yet", ggml_dynamic_size()); + lora.multiplier = multiplier; + // TODO: send version? 
+ lora.apply(tensors, version, n_threads); + lora.free_params_buffer(); - ggml_free(ctx); + int64_t t1 = ggml_time_ms(); - return x_out; + LOG_INFO("lora '%s' applied, taking %.2fs", lora_name.c_str(), (t1 - t0) * 1.0f / 1000); } - ggml_tensor* encode_first_stage(ggml_context* res_ctx, ggml_tensor* x) { - int64_t W = x->ne[0]; - int64_t H = x->ne[1]; - struct ggml_tensor* result = NULL; + void apply_loras(const std::unordered_map& lora_state) { + if (lora_state.size() > 0 && model_wtype != GGML_TYPE_F16 && model_wtype != GGML_TYPE_F32) { + LOG_WARN("In quantized models when applying LoRA, the images have poor quality."); + } + std::unordered_map lora_state_diff; + for (auto& kv : lora_state) { + const std::string& lora_name = kv.first; + float multiplier = kv.second; + lora_state_diff[lora_name] += multiplier; + } + for (auto& kv : curr_lora_state) { + const std::string& lora_name = kv.first; + float curr_multiplier = kv.second; + lora_state_diff[lora_name] -= curr_multiplier; + } - // calculate the amount of memory required - size_t ctx_size = 1 * 1024 * 1024; - { - struct ggml_init_params params; - params.mem_size = ctx_size; - params.mem_buffer = NULL; - params.no_alloc = true; - params.dynamic = dynamic; - - struct ggml_context* ctx = ggml_init(params); - if (!ctx) { - LOG_ERROR("ggml_init() failed"); - return NULL; - } + size_t rm = lora_state_diff.size() - lora_state.size(); + if (rm != 0) { + LOG_INFO("Attempting to apply %lu LoRAs (removing %lu applied LoRAs)", lora_state.size(), rm); + } else { + LOG_INFO("Attempting to apply %lu LoRAs", lora_state.size()); + } - struct ggml_tensor* moments = first_stage_model.encode(ctx, x); - ctx_size += ggml_used_mem(ctx) + ggml_used_mem_of_data(ctx); + for (auto& kv : lora_state_diff) { + apply_lora(kv.first, kv.second); + } - struct ggml_cgraph vae_graph = ggml_build_forward(moments); - struct ggml_cplan cplan = ggml_graph_plan(&vae_graph, n_threads); + curr_lora_state = lora_state; + } - ctx_size += cplan.work_size; 
- LOG_DEBUG("vae context need %.2fMB static memory, with work_size needing %.2fMB", - ctx_size * 1.0f / 1024 / 1024, - cplan.work_size * 1.0f / 1024 / 1024); + ggml_tensor* id_encoder(ggml_context* work_ctx, + ggml_tensor* init_img, + ggml_tensor* prompts_embeds, + ggml_tensor* id_embeds, + std::vector& class_tokens_mask) { + ggml_tensor* res = NULL; + pmid_model->compute(n_threads, init_img, prompts_embeds, id_embeds, class_tokens_mask, &res, work_ctx); + return res; + } - ggml_free(ctx); + SDCondition get_svd_condition(ggml_context* work_ctx, + sd_image_t init_image, + int width, + int height, + int fps = 6, + int motion_bucket_id = 127, + float augmentation_level = 0.f, + bool force_zero_embeddings = false) { + // c_crossattn + int64_t t0 = ggml_time_ms(); + struct ggml_tensor* c_crossattn = NULL; + { + if (force_zero_embeddings) { + c_crossattn = ggml_new_tensor_1d(work_ctx, GGML_TYPE_F32, clip_vision->vision_model.projection_dim); + ggml_set_f32(c_crossattn, 0.f); + } else { + sd_image_f32_t image = sd_image_t_to_sd_image_f32_t(init_image); + sd_image_f32_t resized_image = clip_preprocess(image, clip_vision->vision_model.image_size); + free(image.data); + image.data = NULL; + + ggml_tensor* pixel_values = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, resized_image.width, resized_image.height, 3, 1); + sd_image_f32_to_tensor(resized_image.data, pixel_values, false); + free(resized_image.data); + resized_image.data = NULL; + + // print_ggml_tensor(pixel_values); + clip_vision->compute(n_threads, pixel_values, &c_crossattn, work_ctx); + // print_ggml_tensor(c_crossattn); + } } + // c_concat + struct ggml_tensor* c_concat = NULL; { - struct ggml_init_params params; - params.mem_size = ctx_size; - params.mem_buffer = NULL; - params.no_alloc = false; - params.dynamic = dynamic; - - struct ggml_context* ctx = ggml_init(params); - if (!ctx) { - LOG_ERROR("ggml_init() failed"); - return NULL; + if (force_zero_embeddings) { + c_concat = ggml_new_tensor_4d(work_ctx, 
GGML_TYPE_F32, width / 8, height / 8, 4, 1); + ggml_set_f32(c_concat, 0.f); + } else { + ggml_tensor* init_img = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, width, height, 3, 1); + + if (width != init_image.width || height != init_image.height) { + sd_image_f32_t image = sd_image_t_to_sd_image_f32_t(init_image); + sd_image_f32_t resized_image = resize_sd_image_f32_t(image, width, height); + free(image.data); + image.data = NULL; + sd_image_f32_to_tensor(resized_image.data, init_img, false); + free(resized_image.data); + resized_image.data = NULL; + } else { + sd_image_to_tensor(init_image.data, init_img); + } + if (augmentation_level > 0.f) { + struct ggml_tensor* noise = ggml_dup_tensor(work_ctx, init_img); + ggml_tensor_set_f32_randn(noise, rng); + // encode_pixels += torch.randn_like(pixels) * augmentation_level + ggml_tensor_scale(noise, augmentation_level); + ggml_tensor_add(init_img, noise); + } + ggml_tensor* moments = encode_first_stage(work_ctx, init_img); + c_concat = get_first_stage_encoding(work_ctx, moments); } + } + + // y + struct ggml_tensor* y = NULL; + { + y = ggml_new_tensor_1d(work_ctx, GGML_TYPE_F32, diffusion_model->get_adm_in_channels()); + int out_dim = 256; + int fps_id = fps - 1; + std::vector timesteps = {(float)fps_id, (float)motion_bucket_id, augmentation_level}; + set_timestep_embedding(timesteps, y, out_dim); + } + int64_t t1 = ggml_time_ms(); + LOG_DEBUG("computing svd condition graph completed, taking %" PRId64 " ms", t1 - t0); + return {c_crossattn, y, c_concat}; + } + + ggml_tensor* sample(ggml_context* work_ctx, + ggml_tensor* init_latent, + ggml_tensor* noise, + SDCondition cond, + SDCondition uncond, + ggml_tensor* control_hint, + float control_strength, + float min_cfg, + float cfg_scale, + float guidance, + float eta, + sample_method_t method, + const std::vector& sigmas, + int start_merge_step, + SDCondition id_cond, + std::vector ref_latents = {}, + std::vector skip_layers = {}, + float slg_scale = 0, + float 
skip_layer_start = 0.01, + float skip_layer_end = 0.2, + ggml_tensor* noise_mask = nullptr) { + LOG_DEBUG("Sample"); + struct ggml_init_params params; + size_t data_size = ggml_row_size(init_latent->type, init_latent->ne[0]); + for (int i = 1; i < 4; i++) { + data_size *= init_latent->ne[i]; + } + data_size += 1024; + params.mem_size = data_size * 3; + params.mem_buffer = NULL; + params.no_alloc = false; + ggml_context* tmp_ctx = ggml_init(params); - struct ggml_tensor* moments = first_stage_model.encode(ctx, x); - struct ggml_cgraph vae_graph = ggml_build_forward(moments); + size_t steps = sigmas.size() - 1; + // noise = load_tensor_from_file(work_ctx, "./rand0.bin"); + // print_ggml_tensor(noise); + struct ggml_tensor* x = ggml_dup_tensor(work_ctx, init_latent); + copy_ggml_tensor(x, init_latent); + x = denoiser->noise_scaling(sigmas[0], noise, x); - int64_t t0 = ggml_time_ms(); - ggml_graph_compute_with_ctx(ctx, &vae_graph, n_threads); - int64_t t1 = ggml_time_ms(); - LOG_DEBUG("computing vae graph completed, taking %.2fs", (t1 - t0) * 1.0f / 1000); + struct ggml_tensor* noised_input = ggml_dup_tensor(work_ctx, noise); - result = ggml_dup_tensor(res_ctx, moments); - copy_ggml_tensor(result, moments); + bool has_unconditioned = cfg_scale != 1.0 && uncond.c_crossattn != NULL; + bool has_skiplayer = slg_scale != 0.0 && skip_layers.size() > 0; - size_t rt_mem_size = ctx_size + ggml_curr_max_dynamic_size(); - if (rt_mem_size > max_rt_mem_size) { - max_rt_mem_size = rt_mem_size; - } - size_t graph_mem_size = ggml_used_mem(vae_params_ctx) + rt_mem_size; + // denoise wrapper + struct ggml_tensor* out_cond = ggml_dup_tensor(work_ctx, x); + struct ggml_tensor* out_uncond = NULL; + struct ggml_tensor* out_skip = NULL; - size_t curr_mem_size = curr_params_mem_size + rt_mem_size; - if (curr_mem_size > max_mem_size) { - max_mem_size = curr_mem_size; + if (has_unconditioned) { + out_uncond = ggml_dup_tensor(work_ctx, x); + } + if (has_skiplayer) { + if 
(sd_version_is_dit(version)) { + out_skip = ggml_dup_tensor(work_ctx, x); + } else { + has_skiplayer = false; + LOG_WARN("SLG is incompatible with %s models", model_version_to_str[version]); + } + } + struct ggml_tensor* denoised = ggml_dup_tensor(work_ctx, x); + + auto denoise = [&](ggml_tensor* input, float sigma, int step) -> ggml_tensor* { + if (step == 1) { + pretty_progress(0, (int)steps, 0); + } + int64_t t0 = ggml_time_us(); + + std::vector scaling = denoiser->get_scalings(sigma); + GGML_ASSERT(scaling.size() == 3); + float c_skip = scaling[0]; + float c_out = scaling[1]; + float c_in = scaling[2]; + + float t = denoiser->sigma_to_t(sigma); + std::vector timesteps_vec(x->ne[3], t); // [N, ] + auto timesteps = vector_to_ggml_tensor(work_ctx, timesteps_vec); + std::vector guidance_vec(x->ne[3], guidance); + auto guidance_tensor = vector_to_ggml_tensor(work_ctx, guidance_vec); + + copy_ggml_tensor(noised_input, input); + // noised_input = noised_input * c_in + ggml_tensor_scale(noised_input, c_in); + + std::vector controls; + + if (control_hint != NULL) { + control_net->compute(n_threads, noised_input, control_hint, timesteps, cond.c_crossattn, cond.c_vector); + controls = control_net->controls; + // print_ggml_tensor(controls[12]); + // GGML_ASSERT(0); + } + + if (start_merge_step == -1 || step <= start_merge_step) { + // cond + diffusion_model->compute(n_threads, + noised_input, + timesteps, + cond.c_crossattn, + cond.c_concat, + cond.c_vector, + guidance_tensor, + ref_latents, + -1, + controls, + control_strength, + &out_cond); + } else { + diffusion_model->compute(n_threads, + noised_input, + timesteps, + id_cond.c_crossattn, + cond.c_concat, + id_cond.c_vector, + guidance_tensor, + ref_latents, + -1, + controls, + control_strength, + &out_cond); + } + + float* negative_data = NULL; + if (has_unconditioned) { + // uncond + if (control_hint != NULL) { + control_net->compute(n_threads, noised_input, control_hint, timesteps, uncond.c_crossattn, 
uncond.c_vector); + controls = control_net->controls; + } + diffusion_model->compute(n_threads, + noised_input, + timesteps, + uncond.c_crossattn, + uncond.c_concat, + uncond.c_vector, + guidance_tensor, + ref_latents, + -1, + controls, + control_strength, + &out_uncond); + negative_data = (float*)out_uncond->data; + } + + int step_count = sigmas.size(); + bool is_skiplayer_step = has_skiplayer && step > (int)(skip_layer_start * step_count) && step < (int)(skip_layer_end * step_count); + float* skip_layer_data = NULL; + if (is_skiplayer_step) { + LOG_DEBUG("Skipping layers at step %d\n", step); + // skip layer (same as conditionned) + diffusion_model->compute(n_threads, + noised_input, + timesteps, + cond.c_crossattn, + cond.c_concat, + cond.c_vector, + guidance_tensor, + ref_latents, + -1, + controls, + control_strength, + &out_skip, + NULL, + skip_layers); + skip_layer_data = (float*)out_skip->data; + } + float* vec_denoised = (float*)denoised->data; + float* vec_input = (float*)input->data; + float* positive_data = (float*)out_cond->data; + int ne_elements = (int)ggml_nelements(denoised); + for (int i = 0; i < ne_elements; i++) { + float latent_result = positive_data[i]; + if (has_unconditioned) { + // out_uncond + cfg_scale * (out_cond - out_uncond) + int64_t ne3 = out_cond->ne[3]; + if (min_cfg != cfg_scale && ne3 != 1) { + int64_t i3 = i / out_cond->ne[0] * out_cond->ne[1] * out_cond->ne[2]; + float scale = min_cfg + (cfg_scale - min_cfg) * (i3 * 1.0f / ne3); + } else { + latent_result = negative_data[i] + cfg_scale * (positive_data[i] - negative_data[i]); + } + } + if (is_skiplayer_step) { + latent_result = latent_result + (positive_data[i] - skip_layer_data[i]) * slg_scale; + } + // v = latent_result, eps = latent_result + // denoised = (v * c_out + input * c_skip) or (input + eps * c_out) + vec_denoised[i] = latent_result * c_out + vec_input[i] * c_skip; + } + int64_t t1 = ggml_time_us(); + if (step > 0) { + pretty_progress(step, (int)steps, (t1 - t0) / 
1000000.f); + // LOG_INFO("step %d sampling completed taking %.2fs", step, (t1 - t0) * 1.0f / 1000000); + } + if (noise_mask != nullptr) { + for (int64_t x = 0; x < denoised->ne[0]; x++) { + for (int64_t y = 0; y < denoised->ne[1]; y++) { + float mask = ggml_tensor_get_f32(noise_mask, x, y); + for (int64_t k = 0; k < denoised->ne[2]; k++) { + float init = ggml_tensor_get_f32(init_latent, x, y, k); + float den = ggml_tensor_get_f32(denoised, x, y, k); + ggml_tensor_set_f32(denoised, init + mask * (den - init), x, y, k); + } + } + } } - LOG_INFO( - "vae graph use %.2fMB of memory: params %.2fMB, " - "runtime %.2fMB (static %.2fMB, dynamic %.2fMB)", - graph_mem_size * 1.0f / 1024 / 1024, - ggml_used_mem(vae_params_ctx) * 1.0f / 1024 / 1024, - rt_mem_size * 1.0f / 1024 / 1024, - ctx_size * 1.0f / 1024 / 1024, - ggml_curr_max_dynamic_size() * 1.0f / 1024 / 1024); - LOG_DEBUG("%zu bytes of dynamic memory has not been released yet", ggml_dynamic_size()); + return denoised; + }; - ggml_free(ctx); - } + sample_k_diffusion(method, denoise, work_ctx, x, sigmas, rng, eta); - return result; + x = denoiser->inverse_noise_scaling(sigmas[sigmas.size() - 1], x); + + if (control_net) { + control_net->free_control_ctx(); + control_net->free_compute_buffer(); + } + diffusion_model->free_compute_buffer(); + return x; } // ldm.models.diffusion.ddpm.LatentDiffusion.get_first_stage_encoding - ggml_tensor* get_first_stage_encoding(ggml_context* res_ctx, ggml_tensor* moments) { + ggml_tensor* get_first_stage_encoding(ggml_context* work_ctx, ggml_tensor* moments) { // ldm.modules.distributions.distributions.DiagonalGaussianDistribution.sample - ggml_tensor* latent = ggml_new_tensor_4d(res_ctx, moments->type, moments->ne[0], - moments->ne[1], moments->ne[2] / 2, moments->ne[3]); - struct ggml_tensor* noise = ggml_dup_tensor(res_ctx, latent); - ggml_tensor_set_f32_randn(noise); - // noise = load_tensor_from_file(res_ctx, "noise.bin"); + ggml_tensor* latent = ggml_new_tensor_4d(work_ctx, 
moments->type, moments->ne[0], moments->ne[1], moments->ne[2] / 2, moments->ne[3]); + struct ggml_tensor* noise = ggml_dup_tensor(work_ctx, latent); + ggml_tensor_set_f32_randn(noise, rng); + // noise = load_tensor_from_file(work_ctx, "noise.bin"); { - float mean = 0; + float mean = 0; float logvar = 0; - float value = 0; - float std_ = 0; + float value = 0; + float std_ = 0; for (int i = 0; i < latent->ne[3]; i++) { for (int j = 0; j < latent->ne[2]; j++) { for (int k = 0; k < latent->ne[1]; k++) { for (int l = 0; l < latent->ne[0]; l++) { - mean = ggml_tensor_get_f32(moments, l, k, j, i); + mean = ggml_tensor_get_f32(moments, l, k, j, i); logvar = ggml_tensor_get_f32(moments, l, k, j + (int)latent->ne[2], i); logvar = std::max(-30.0f, std::min(logvar, 20.0f)); - std_ = std::exp(0.5f * logvar); - value = mean + std_ * ggml_tensor_get_f32(noise, l, k, j, i); - value = value * scale_factor; + std_ = std::exp(0.5f * logvar); + value = mean + std_ * ggml_tensor_get_f32(noise, l, k, j, i); + value = value * scale_factor; // printf("%d %d %d %d -> %f\n", i, j, k, l, value); ggml_tensor_set_f32(latent, value, l, k, j, i); } @@ -3410,297 +1074,1042 @@ class StableDiffusionGGML { return latent; } - ggml_tensor* decode_first_stage(ggml_context* res_ctx, ggml_tensor* z) { - int64_t W = z->ne[0]; - int64_t H = z->ne[1]; - struct ggml_tensor* result_img = NULL; - - { - float* vec = (float*)z->data; - for (int i = 0; i < ggml_nelements(z); i++) { - vec[i] = 1.0f / scale_factor * vec[i]; + ggml_tensor* compute_first_stage(ggml_context* work_ctx, ggml_tensor* x, bool decode) { + int64_t W = x->ne[0]; + int64_t H = x->ne[1]; + int64_t C = 8; + if (use_tiny_autoencoder) { + C = 4; + } else { + if (sd_version_is_sd3(version)) { + C = 32; + } else if (sd_version_is_flux(version)) { + C = 32; + } + } + ggml_tensor* result = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, + decode ? (W * 8) : (W / 8), // width + decode ? (H * 8) : (H / 8), // height + decode ? 
3 : C, + x->ne[3]); // channels + int64_t t0 = ggml_time_ms(); + if (!use_tiny_autoencoder) { + if (decode) { + ggml_tensor_scale(x, 1.0f / scale_factor); + } else { + ggml_tensor_scale_input(x); + } + if (vae_tiling && decode) { // TODO: support tiling vae encode + // split latent in 32x32 tiles and compute in several steps + auto on_tiling = [&](ggml_tensor* in, ggml_tensor* out, bool init) { + first_stage_model->compute(n_threads, in, decode, &out); + }; + sd_tiling(x, result, 8, 32, 0.5f, on_tiling); + } else { + first_stage_model->compute(n_threads, x, decode, &result); + } + first_stage_model->free_compute_buffer(); + if (decode) { + ggml_tensor_scale_output(result); } + } else { + if (vae_tiling && decode) { // TODO: support tiling vae encode + // split latent in 64x64 tiles and compute in several steps + auto on_tiling = [&](ggml_tensor* in, ggml_tensor* out, bool init) { + tae_first_stage->compute(n_threads, in, decode, &out); + }; + sd_tiling(x, result, 8, 64, 0.5f, on_tiling); + } else { + tae_first_stage->compute(n_threads, x, decode, &result); + } + tae_first_stage->free_compute_buffer(); } - // calculate the amount of memory required - size_t ctx_size = 1 * 1024 * 1024; - { - struct ggml_init_params params; - params.mem_size = ctx_size; - params.mem_buffer = NULL; - params.no_alloc = true; - params.dynamic = dynamic; - - struct ggml_context* ctx = ggml_init(params); - if (!ctx) { - LOG_ERROR("ggml_init() failed"); - return NULL; - } + int64_t t1 = ggml_time_ms(); + LOG_DEBUG("computing vae [mode: %s] graph completed, taking %.2fs", decode ? 
"DECODE" : "ENCODE", (t1 - t0) * 1.0f / 1000); + if (decode) { + ggml_tensor_clamp(result, 0.0f, 1.0f); + } + return result; + } + + ggml_tensor* encode_first_stage(ggml_context* work_ctx, ggml_tensor* x) { + return compute_first_stage(work_ctx, x, false); + } + + ggml_tensor* decode_first_stage(ggml_context* work_ctx, ggml_tensor* x) { + return compute_first_stage(work_ctx, x, true); + } +}; - struct ggml_tensor* img = first_stage_model.decoder.forward(ctx, z); - ctx_size += ggml_used_mem(ctx) + ggml_used_mem_of_data(ctx); +/*================================================= SD API ==================================================*/ - struct ggml_cgraph vae_graph = ggml_build_forward(img); - struct ggml_cplan cplan = ggml_graph_plan(&vae_graph, n_threads); +struct sd_ctx_t { + StableDiffusionGGML* sd = NULL; +}; - ctx_size += cplan.work_size; - LOG_DEBUG("vae context need %.2fMB static memory, with work_size needing %.2fMB", - ctx_size * 1.0f / 1024 / 1024, - cplan.work_size * 1.0f / 1024 / 1024); +sd_ctx_t* new_sd_ctx(const char* model_path_c_str, + const char* clip_l_path_c_str, + const char* clip_g_path_c_str, + const char* t5xxl_path_c_str, + const char* diffusion_model_path_c_str, + const char* vae_path_c_str, + const char* taesd_path_c_str, + const char* control_net_path_c_str, + const char* lora_model_dir_c_str, + const char* embed_dir_c_str, + const char* id_embed_dir_c_str, + bool vae_decode_only, + bool vae_tiling, + bool free_params_immediately, + int n_threads, + enum sd_type_t wtype, + enum rng_type_t rng_type, + enum schedule_t s, + bool keep_clip_on_cpu, + bool keep_control_net_cpu, + bool keep_vae_on_cpu, + bool diffusion_flash_attn, + bool chroma_use_dit_mask, + bool chroma_use_t5_mask, + int chroma_t5_mask_pad) { + sd_ctx_t* sd_ctx = (sd_ctx_t*)malloc(sizeof(sd_ctx_t)); + if (sd_ctx == NULL) { + return NULL; + } + std::string model_path(model_path_c_str); + std::string clip_l_path(clip_l_path_c_str); + std::string clip_g_path(clip_g_path_c_str); 
+ std::string t5xxl_path(t5xxl_path_c_str); + std::string diffusion_model_path(diffusion_model_path_c_str); + std::string vae_path(vae_path_c_str); + std::string taesd_path(taesd_path_c_str); + std::string control_net_path(control_net_path_c_str); + std::string embd_path(embed_dir_c_str); + std::string id_embd_path(id_embed_dir_c_str); + std::string lora_model_dir(lora_model_dir_c_str); + + sd_ctx->sd = new StableDiffusionGGML(n_threads, + vae_decode_only, + free_params_immediately, + lora_model_dir, + rng_type); + if (sd_ctx->sd == NULL) { + return NULL; + } - ggml_free(ctx); - } + if (!sd_ctx->sd->load_from_file(model_path, + clip_l_path, + clip_g_path, + t5xxl_path_c_str, + diffusion_model_path, + vae_path, + control_net_path, + embd_path, + id_embd_path, + taesd_path, + vae_tiling, + (ggml_type)wtype, + s, + keep_clip_on_cpu, + keep_control_net_cpu, + keep_vae_on_cpu, + diffusion_flash_attn, + chroma_use_dit_mask, + chroma_use_t5_mask, + chroma_t5_mask_pad)) { + delete sd_ctx->sd; + sd_ctx->sd = NULL; + free(sd_ctx); + return NULL; + } + return sd_ctx; +} - { - struct ggml_init_params params; - params.mem_size = ctx_size; - params.mem_buffer = NULL; - params.no_alloc = false; - params.dynamic = dynamic; - - struct ggml_context* ctx = ggml_init(params); - if (!ctx) { - LOG_ERROR("ggml_init() failed"); - return NULL; - } +void free_sd_ctx(sd_ctx_t* sd_ctx) { + if (sd_ctx->sd != NULL) { + delete sd_ctx->sd; + sd_ctx->sd = NULL; + } + free(sd_ctx); +} - struct ggml_tensor* img = first_stage_model.decode(ctx, z); - struct ggml_cgraph vae_graph = ggml_build_forward(img); +sd_image_t* generate_image(sd_ctx_t* sd_ctx, + struct ggml_context* work_ctx, + ggml_tensor* init_latent, + std::string prompt, + std::string negative_prompt, + int clip_skip, + float cfg_scale, + float guidance, + float eta, + int width, + int height, + enum sample_method_t sample_method, + const std::vector& sigmas, + int64_t seed, + int batch_count, + const sd_image_t* control_cond, + float 
control_strength, + float style_ratio, + bool normalize_input, + std::string input_id_images_path, + std::vector ref_latents, + std::vector skip_layers = {}, + float slg_scale = 0, + float skip_layer_start = 0.01, + float skip_layer_end = 0.2, + ggml_tensor* masked_image = NULL) { + if (seed < 0) { + // Generally, when using the provided command line, the seed is always >0. + // However, to prevent potential issues if 'stable-diffusion.cpp' is invoked as a library + // by a third party with a seed <0, let's incorporate randomization here. + srand((int)time(NULL)); + seed = rand(); + } - int64_t t0 = ggml_time_ms(); - ggml_graph_compute_with_ctx(ctx, &vae_graph, n_threads); - int64_t t1 = ggml_time_ms(); - LOG_DEBUG("computing vae graph completed, taking %.2fs", (t1 - t0) * 1.0f / 1000); + // for (auto v : sigmas) { + // std::cout << v << " "; + // } + // std::cout << std::endl; - result_img = ggml_dup_tensor(res_ctx, img); - copy_ggml_tensor(result_img, img); + int sample_steps = sigmas.size() - 1; - size_t rt_mem_size = ctx_size + ggml_curr_max_dynamic_size(); - if (rt_mem_size > max_rt_mem_size) { - max_rt_mem_size = rt_mem_size; - } - size_t graph_mem_size = ggml_used_mem(vae_params_ctx) + rt_mem_size; + // Apply lora + auto result_pair = extract_and_remove_lora(prompt); + std::unordered_map lora_f2m = result_pair.first; // lora_name -> multiplier - size_t curr_mem_size = curr_params_mem_size + rt_mem_size; - if (curr_mem_size > max_mem_size) { - max_mem_size = curr_mem_size; - } + for (auto& kv : lora_f2m) { + LOG_DEBUG("lora %s:%.2f", kv.first.c_str(), kv.second); + } - LOG_INFO( - "vae graph use %.2fMB of memory: params %.2fMB, " - "runtime %.2fMB (static %.2fMB, dynamic %.2fMB)", - graph_mem_size * 1.0f / 1024 / 1024, - ggml_used_mem(vae_params_ctx) * 1.0f / 1024 / 1024, - rt_mem_size * 1.0f / 1024 / 1024, - ctx_size * 1.0f / 1024 / 1024, - ggml_curr_max_dynamic_size() * 1.0f / 1024 / 1024); - LOG_DEBUG("%zu bytes of dynamic memory has not been released 
yet", ggml_dynamic_size()); + prompt = result_pair.second; + LOG_DEBUG("prompt after extract and remove lora: \"%s\"", prompt.c_str()); - ggml_free(ctx); + int64_t t0 = ggml_time_ms(); + sd_ctx->sd->apply_loras(lora_f2m); + int64_t t1 = ggml_time_ms(); + LOG_INFO("apply_loras completed, taking %.2fs", (t1 - t0) * 1.0f / 1000); + + // Photo Maker + std::string prompt_text_only; + ggml_tensor* init_img = NULL; + SDCondition id_cond; + std::vector class_tokens_mask; + if (sd_ctx->sd->stacked_id) { + if (!sd_ctx->sd->pmid_lora->applied) { + t0 = ggml_time_ms(); + sd_ctx->sd->pmid_lora->apply(sd_ctx->sd->tensors, sd_ctx->sd->version, sd_ctx->sd->n_threads); + t1 = ggml_time_ms(); + sd_ctx->sd->pmid_lora->applied = true; + LOG_INFO("pmid_lora apply completed, taking %.2fs", (t1 - t0) * 1.0f / 1000); + if (sd_ctx->sd->free_params_immediately) { + sd_ctx->sd->pmid_lora->free_params_buffer(); + } + } + // preprocess input id images + std::vector input_id_images; + bool pmv2 = sd_ctx->sd->pmid_model->get_version() == PM_VERSION_2; + if (sd_ctx->sd->pmid_model && input_id_images_path.size() > 0) { + std::vector img_files = get_files_from_dir(input_id_images_path); + for (std::string img_file : img_files) { + int c = 0; + int width, height; + if (ends_with(img_file, "safetensors")) { + continue; + } + uint8_t* input_image_buffer = stbi_load(img_file.c_str(), &width, &height, &c, 3); + if (input_image_buffer == NULL) { + LOG_ERROR("PhotoMaker load image from '%s' failed", img_file.c_str()); + continue; + } else { + LOG_INFO("PhotoMaker loaded image from '%s'", img_file.c_str()); + } + sd_image_t* input_image = NULL; + input_image = new sd_image_t{(uint32_t)width, + (uint32_t)height, + 3, + input_image_buffer}; + input_image = preprocess_id_image(input_image); + if (input_image == NULL) { + LOG_ERROR("preprocess input id image from '%s' failed", img_file.c_str()); + continue; + } + input_id_images.push_back(input_image); + } + } + if (input_id_images.size() > 0) { + 
sd_ctx->sd->pmid_model->style_strength = style_ratio; + int32_t w = input_id_images[0]->width; + int32_t h = input_id_images[0]->height; + int32_t channels = input_id_images[0]->channel; + int32_t num_input_images = (int32_t)input_id_images.size(); + init_img = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, w, h, channels, num_input_images); + // TODO: move these to somewhere else and be user settable + float mean[] = {0.48145466f, 0.4578275f, 0.40821073f}; + float std[] = {0.26862954f, 0.26130258f, 0.27577711f}; + for (int i = 0; i < num_input_images; i++) { + sd_image_t* init_image = input_id_images[i]; + if (normalize_input) + sd_mul_images_to_tensor(init_image->data, init_img, i, mean, std); + else + sd_mul_images_to_tensor(init_image->data, init_img, i, NULL, NULL); + } + t0 = ggml_time_ms(); + auto cond_tup = sd_ctx->sd->cond_stage_model->get_learned_condition_with_trigger(work_ctx, + sd_ctx->sd->n_threads, prompt, + clip_skip, + width, + height, + num_input_images, + sd_ctx->sd->diffusion_model->get_adm_in_channels()); + id_cond = std::get<0>(cond_tup); + class_tokens_mask = std::get<1>(cond_tup); // + struct ggml_tensor* id_embeds = NULL; + if (pmv2) { + // id_embeds = sd_ctx->sd->pmid_id_embeds->get(); + id_embeds = load_tensor_from_file(work_ctx, path_join(input_id_images_path, "id_embeds.bin")); + // print_ggml_tensor(id_embeds, true, "id_embeds:"); + } + id_cond.c_crossattn = sd_ctx->sd->id_encoder(work_ctx, init_img, id_cond.c_crossattn, id_embeds, class_tokens_mask); + t1 = ggml_time_ms(); + LOG_INFO("Photomaker ID Stacking, taking %" PRId64 " ms", t1 - t0); + if (sd_ctx->sd->free_params_immediately) { + sd_ctx->sd->pmid_model->free_params_buffer(); + } + // Encode input prompt without the trigger word for delayed conditioning + prompt_text_only = sd_ctx->sd->cond_stage_model->remove_trigger_from_prompt(work_ctx, prompt); + // printf("%s || %s \n", prompt.c_str(), prompt_text_only.c_str()); + prompt = prompt_text_only; // + // if (sample_steps < 50) { + 
// LOG_INFO("sampling steps increases from %d to 50 for PHOTOMAKER", sample_steps); + // sample_steps = 50; + // } + } else { + LOG_WARN("Provided PhotoMaker model file, but NO input ID images"); + LOG_WARN("Turn off PhotoMaker"); + sd_ctx->sd->stacked_id = false; + } + for (sd_image_t* img : input_id_images) { + free(img->data); } + input_id_images.clear(); + } + + // Get learned condition + t0 = ggml_time_ms(); + SDCondition cond = sd_ctx->sd->cond_stage_model->get_learned_condition(work_ctx, + sd_ctx->sd->n_threads, + prompt, + clip_skip, + width, + height, + sd_ctx->sd->diffusion_model->get_adm_in_channels()); - return result_img; + SDCondition uncond; + if (cfg_scale != 1.0) { + bool force_zero_embeddings = false; + if (sd_version_is_sdxl(sd_ctx->sd->version) && negative_prompt.size() == 0 && !sd_ctx->sd->is_using_edm_v_parameterization) { + force_zero_embeddings = true; + } + uncond = sd_ctx->sd->cond_stage_model->get_learned_condition(work_ctx, + sd_ctx->sd->n_threads, + negative_prompt, + clip_skip, + width, + height, + sd_ctx->sd->diffusion_model->get_adm_in_channels(), + force_zero_embeddings); + } + t1 = ggml_time_ms(); + LOG_INFO("get_learned_condition completed, taking %" PRId64 " ms", t1 - t0); + + if (sd_ctx->sd->free_params_immediately) { + sd_ctx->sd->cond_stage_model->free_params_buffer(); + } + + // Control net hint + struct ggml_tensor* image_hint = NULL; + if (control_cond != NULL) { + image_hint = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, width, height, 3, 1); + sd_image_to_tensor(control_cond->data, image_hint); + } + + // Sample + std::vector final_latents; // collect latents to decode + int C = 4; + if (sd_version_is_sd3(sd_ctx->sd->version)) { + C = 16; + } else if (sd_version_is_flux(sd_ctx->sd->version)) { + C = 16; + } + int W = width / 8; + int H = height / 8; + LOG_INFO("sampling using %s method", sampling_methods_str[sample_method]); + ggml_tensor* noise_mask = nullptr; + if (sd_version_is_inpaint(sd_ctx->sd->version)) { + if 
(masked_image == NULL) { + int64_t mask_channels = 1; + if (sd_ctx->sd->version == VERSION_FLUX_FILL) { + mask_channels = 8 * 8; // flatten the whole mask + } + // no mask, set the whole image as masked + masked_image = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, init_latent->ne[0], init_latent->ne[1], mask_channels + init_latent->ne[2], 1); + for (int64_t x = 0; x < masked_image->ne[0]; x++) { + for (int64_t y = 0; y < masked_image->ne[1]; y++) { + if (sd_ctx->sd->version == VERSION_FLUX_FILL) { + // TODO: this might be wrong + for (int64_t c = 0; c < init_latent->ne[2]; c++) { + ggml_tensor_set_f32(masked_image, 0, x, y, c); + } + for (int64_t c = init_latent->ne[2]; c < masked_image->ne[2]; c++) { + ggml_tensor_set_f32(masked_image, 1, x, y, c); + } + } else { + ggml_tensor_set_f32(masked_image, 1, x, y, 0); + for (int64_t c = 1; c < masked_image->ne[2]; c++) { + ggml_tensor_set_f32(masked_image, 0, x, y, c); + } + } + } + } + } + cond.c_concat = masked_image; + uncond.c_concat = masked_image; + } else { + noise_mask = masked_image; + } + for (int b = 0; b < batch_count; b++) { + int64_t sampling_start = ggml_time_ms(); + int64_t cur_seed = seed + b; + LOG_INFO("generating image: %i/%i - seed %" PRId64, b + 1, batch_count, cur_seed); + + sd_ctx->sd->rng->manual_seed(cur_seed); + struct ggml_tensor* x_t = init_latent; + struct ggml_tensor* noise = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, W, H, C, 1); + ggml_tensor_set_f32_randn(noise, sd_ctx->sd->rng); + + int start_merge_step = -1; + if (sd_ctx->sd->stacked_id) { + start_merge_step = int(sd_ctx->sd->pmid_model->style_strength / 100.f * sample_steps); + // if (start_merge_step > 30) + // start_merge_step = 30; + LOG_INFO("PHOTOMAKER: start_merge_step: %d", start_merge_step); + } + + struct ggml_tensor* x_0 = sd_ctx->sd->sample(work_ctx, + x_t, + noise, + cond, + uncond, + image_hint, + control_strength, + cfg_scale, + cfg_scale, + guidance, + eta, + sample_method, + sigmas, + start_merge_step, + id_cond, + 
ref_latents, + skip_layers, + slg_scale, + skip_layer_start, + skip_layer_end, + noise_mask); + + // struct ggml_tensor* x_0 = load_tensor_from_file(ctx, "samples_ddim.bin"); + // print_ggml_tensor(x_0); + int64_t sampling_end = ggml_time_ms(); + LOG_INFO("sampling completed, taking %.2fs", (sampling_end - sampling_start) * 1.0f / 1000); + final_latents.push_back(x_0); + } + + if (sd_ctx->sd->free_params_immediately) { + sd_ctx->sd->diffusion_model->free_params_buffer(); + } + int64_t t3 = ggml_time_ms(); + LOG_INFO("generating %" PRId64 " latent images completed, taking %.2fs", final_latents.size(), (t3 - t1) * 1.0f / 1000); + + // Decode to image + LOG_INFO("decoding %zu latents", final_latents.size()); + std::vector decoded_images; // collect decoded images + for (size_t i = 0; i < final_latents.size(); i++) { + t1 = ggml_time_ms(); + struct ggml_tensor* img = sd_ctx->sd->decode_first_stage(work_ctx, final_latents[i] /* x_0 */); + // print_ggml_tensor(img); + if (img != NULL) { + decoded_images.push_back(img); + } + int64_t t2 = ggml_time_ms(); + LOG_INFO("latent %" PRId64 " decoded, taking %.2fs", i + 1, (t2 - t1) * 1.0f / 1000); + } + + int64_t t4 = ggml_time_ms(); + LOG_INFO("decode_first_stage completed, taking %.2fs", (t4 - t3) * 1.0f / 1000); + if (sd_ctx->sd->free_params_immediately && !sd_ctx->sd->use_tiny_autoencoder) { + sd_ctx->sd->first_stage_model->free_params_buffer(); + } + sd_image_t* result_images = (sd_image_t*)calloc(batch_count, sizeof(sd_image_t)); + if (result_images == NULL) { + ggml_free(work_ctx); + return NULL; } -}; -/*================================================= StableDiffusion ==================================================*/ + for (size_t i = 0; i < decoded_images.size(); i++) { + result_images[i].width = width; + result_images[i].height = height; + result_images[i].channel = 3; + result_images[i].data = sd_tensor_to_image(decoded_images[i]); + } + ggml_free(work_ctx); -StableDiffusion::StableDiffusion(int n_threads, - bool 
vae_decode_only, - bool free_params_immediately) { - sd = std::make_shared(n_threads, - vae_decode_only, - free_params_immediately); + return result_images; } -bool StableDiffusion::load_from_file(const std::string& file_path) { - return sd->load_from_file(file_path); +ggml_tensor* generate_init_latent(sd_ctx_t* sd_ctx, + ggml_context* work_ctx, + int width, + int height) { + int C = 4; + if (sd_version_is_sd3(sd_ctx->sd->version)) { + C = 16; + } else if (sd_version_is_flux(sd_ctx->sd->version)) { + C = 16; + } + int W = width / 8; + int H = height / 8; + ggml_tensor* init_latent = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, W, H, C, 1); + if (sd_version_is_sd3(sd_ctx->sd->version)) { + ggml_set_f32(init_latent, 0.0609f); + } else if (sd_version_is_flux(sd_ctx->sd->version)) { + ggml_set_f32(init_latent, 0.1159f); + } else { + ggml_set_f32(init_latent, 0.f); + } + return init_latent; } -std::vector StableDiffusion::txt2img(const std::string& prompt, - const std::string& negative_prompt, - float cfg_scale, - int width, - int height, - SampleMethod sample_method, - int sample_steps, - int seed) { - std::vector result; +sd_image_t* txt2img(sd_ctx_t* sd_ctx, + const char* prompt_c_str, + const char* negative_prompt_c_str, + int clip_skip, + float cfg_scale, + float guidance, + float eta, + int width, + int height, + enum sample_method_t sample_method, + int sample_steps, + int64_t seed, + int batch_count, + const sd_image_t* control_cond, + float control_strength, + float style_ratio, + bool normalize_input, + const char* input_id_images_path_c_str, + int* skip_layers = NULL, + size_t skip_layers_count = 0, + float slg_scale = 0, + float skip_layer_start = 0.01, + float skip_layer_end = 0.2) { + std::vector skip_layers_vec(skip_layers, skip_layers + skip_layers_count); + LOG_DEBUG("txt2img %dx%d", width, height); + if (sd_ctx == NULL) { + return NULL; + } + struct ggml_init_params params; - params.mem_size = static_cast(10 * 1024) * 1024; // 10M + params.mem_size = 
static_cast(10 * 1024 * 1024); // 10 MB + if (sd_version_is_sd3(sd_ctx->sd->version)) { + params.mem_size *= 3; + } + if (sd_version_is_flux(sd_ctx->sd->version)) { + params.mem_size *= 4; + } + if (sd_ctx->sd->stacked_id) { + params.mem_size += static_cast(10 * 1024 * 1024); // 10 MB + } + params.mem_size += width * height * 3 * sizeof(float); + params.mem_size *= batch_count; params.mem_buffer = NULL; - params.no_alloc = false; - params.dynamic = false; - struct ggml_context* ctx = ggml_init(params); - if (!ctx) { + params.no_alloc = false; + // LOG_DEBUG("mem_size %u ", params.mem_size); + + struct ggml_context* work_ctx = ggml_init(params); + if (!work_ctx) { LOG_ERROR("ggml_init() failed"); - return result; + return NULL; } - if (seed < 0) { - seed = (int)time(NULL); - } - set_random_seed(seed); + size_t t0 = ggml_time_ms(); - int64_t t0 = ggml_time_ms(); - ggml_tensor* c = sd->get_learned_condition(ctx, prompt); - struct ggml_tensor* uc = NULL; - if (cfg_scale != 1.0) { - uc = sd->get_learned_condition(ctx, negative_prompt); - } - int64_t t1 = ggml_time_ms(); - LOG_INFO("get_learned_condition completed, taking %.2fs", (t1 - t0) * 1.0f / 1000); + std::vector sigmas = sd_ctx->sd->denoiser->get_sigmas(sample_steps); - if (sd->free_params_immediately) { - sd->curr_params_mem_size -= ggml_used_mem(sd->clip_params_ctx); - ggml_free(sd->clip_params_ctx); - sd->clip_params_ctx = NULL; + if (sd_version_is_inpaint(sd_ctx->sd->version)) { + LOG_WARN("This is an inpainting model, this should only be used in img2img mode with a mask"); } - int C = 4; - int W = width / 8; - int H = height / 8; - struct ggml_tensor* x_t = ggml_new_tensor_4d(ctx, GGML_TYPE_F32, W, H, C, 1); - ggml_tensor_set_f32_randn(x_t); + ggml_tensor* init_latent = generate_init_latent(sd_ctx, work_ctx, width, height); - std::vector sigmas = sd->denoiser.get_sigmas(sample_steps); + sd_image_t* result_images = generate_image(sd_ctx, + work_ctx, + init_latent, + prompt_c_str, + negative_prompt_c_str, + 
clip_skip, + cfg_scale, + guidance, + eta, + width, + height, + sample_method, + sigmas, + seed, + batch_count, + control_cond, + control_strength, + style_ratio, + normalize_input, + input_id_images_path_c_str, + {}, + skip_layers_vec, + slg_scale, + skip_layer_start, + skip_layer_end); - LOG_INFO("start sampling"); - struct ggml_tensor* x_0 = sd->sample(ctx, x_t, c, uc, cfg_scale, sample_method, sigmas); - // struct ggml_tensor* x_0 = load_tensor_from_file(ctx, "samples_ddim.bin"); - // print_ggml_tensor(x_0); - int64_t t2 = ggml_time_ms(); - LOG_INFO("sampling completed, taking %.2fs", (t2 - t1) * 1.0f / 1000); + size_t t1 = ggml_time_ms(); + + LOG_INFO("txt2img completed in %.2fs", (t1 - t0) * 1.0f / 1000); + + return result_images; +} - if (sd->free_params_immediately) { - sd->curr_params_mem_size -= ggml_used_mem(sd->unet_params_ctx); - ggml_free(sd->unet_params_ctx); - sd->unet_params_ctx = NULL; +sd_image_t* img2img(sd_ctx_t* sd_ctx, + sd_image_t init_image, + sd_image_t mask, + const char* prompt_c_str, + const char* negative_prompt_c_str, + int clip_skip, + float cfg_scale, + float guidance, + float eta, + int width, + int height, + sample_method_t sample_method, + int sample_steps, + float strength, + int64_t seed, + int batch_count, + const sd_image_t* control_cond, + float control_strength, + float style_ratio, + bool normalize_input, + const char* input_id_images_path_c_str, + int* skip_layers = NULL, + size_t skip_layers_count = 0, + float slg_scale = 0, + float skip_layer_start = 0.01, + float skip_layer_end = 0.2) { + std::vector skip_layers_vec(skip_layers, skip_layers + skip_layers_count); + LOG_DEBUG("img2img %dx%d", width, height); + if (sd_ctx == NULL) { + return NULL; } - struct ggml_tensor* img = sd->decode_first_stage(ctx, x_0); - if (img != NULL) { - result = ggml_to_image_vec(img); + struct ggml_init_params params; + params.mem_size = static_cast(10 * 1024 * 1024); // 10 MB + if (sd_version_is_sd3(sd_ctx->sd->version)) { + params.mem_size 
*= 2; } - int64_t t3 = ggml_time_ms(); - LOG_INFO("decode_first_stage completed, taking %.2fs", (t3 - t2) * 1.0f / 1000); + if (sd_version_is_flux(sd_ctx->sd->version)) { + params.mem_size *= 3; + } + if (sd_ctx->sd->stacked_id) { + params.mem_size += static_cast(10 * 1024 * 1024); // 10 MB + } + params.mem_size += width * height * 3 * sizeof(float) * 3; + params.mem_size *= batch_count; + params.mem_buffer = NULL; + params.no_alloc = false; + // LOG_DEBUG("mem_size %u ", params.mem_size); - if (sd->free_params_immediately) { - sd->curr_params_mem_size -= ggml_used_mem(sd->vae_params_ctx); - ggml_free(sd->vae_params_ctx); - sd->vae_params_ctx = NULL; + struct ggml_context* work_ctx = ggml_init(params); + if (!work_ctx) { + LOG_ERROR("ggml_init() failed"); + return NULL; } - LOG_INFO( - "txt2img completed in %.2fs, use %.2fMB of memory: peak params memory %.2fMB, " - "peak runtime memory %.2fMB", - (t3 - t0) * 1.0f / 1000, - sd->max_mem_size * 1.0f / 1024 / 1024, - sd->max_params_mem_size * 1.0f / 1024 / 1024, - sd->max_rt_mem_size * 1.0f / 1024 / 1024); + size_t t0 = ggml_time_ms(); - ggml_free(ctx); - return result; -} + if (seed < 0) { + srand((int)time(NULL)); + seed = rand(); + } + sd_ctx->sd->rng->manual_seed(seed); -std::vector StableDiffusion::img2img(const std::vector& init_img_vec, - const std::string& prompt, - const std::string& negative_prompt, - float cfg_scale, - int width, - int height, - SampleMethod sample_method, - int sample_steps, - float strength, - int seed) { - std::vector result; - if (init_img_vec.size() != width * height * 3) { - return result; + ggml_tensor* init_img = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, width, height, 3, 1); + ggml_tensor* mask_img = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, width, height, 1, 1); + + sd_mask_to_tensor(mask.data, mask_img); + + sd_image_to_tensor(init_image.data, init_img); + + ggml_tensor* masked_image; + + if (sd_version_is_inpaint(sd_ctx->sd->version)) { + int64_t mask_channels = 1; + if 
(sd_ctx->sd->version == VERSION_FLUX_FILL) { + mask_channels = 8 * 8; // flatten the whole mask + } + ggml_tensor* masked_img = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, width, height, 3, 1); + sd_apply_mask(init_img, mask_img, masked_img); + ggml_tensor* masked_image_0 = NULL; + if (!sd_ctx->sd->use_tiny_autoencoder) { + ggml_tensor* moments = sd_ctx->sd->encode_first_stage(work_ctx, masked_img); + masked_image_0 = sd_ctx->sd->get_first_stage_encoding(work_ctx, moments); + } else { + masked_image_0 = sd_ctx->sd->encode_first_stage(work_ctx, masked_img); + } + masked_image = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, masked_image_0->ne[0], masked_image_0->ne[1], mask_channels + masked_image_0->ne[2], 1); + for (int ix = 0; ix < masked_image_0->ne[0]; ix++) { + for (int iy = 0; iy < masked_image_0->ne[1]; iy++) { + int mx = ix * 8; + int my = iy * 8; + if (sd_ctx->sd->version == VERSION_FLUX_FILL) { + for (int k = 0; k < masked_image_0->ne[2]; k++) { + float v = ggml_tensor_get_f32(masked_image_0, ix, iy, k); + ggml_tensor_set_f32(masked_image, v, ix, iy, k); + } + // "Encode" 8x8 mask chunks into a flattened 1x64 vector, and concatenate to masked image + for (int x = 0; x < 8; x++) { + for (int y = 0; y < 8; y++) { + float m = ggml_tensor_get_f32(mask_img, mx + x, my + y); + // TODO: check if the way the mask is flattened is correct (is it supposed to be x*8+y or x+8*y?) 
+ // python code was using "b (h 8) (w 8) -> b (8 8) h w" + ggml_tensor_set_f32(masked_image, m, ix, iy, masked_image_0->ne[2] + x * 8 + y); + } + } + } else { + float m = ggml_tensor_get_f32(mask_img, mx, my); + ggml_tensor_set_f32(masked_image, m, ix, iy, 0); + for (int k = 0; k < masked_image_0->ne[2]; k++) { + float v = ggml_tensor_get_f32(masked_image_0, ix, iy, k); + ggml_tensor_set_f32(masked_image, v, ix, iy, k + mask_channels); + } + } + } + } + } else { + // LOG_WARN("Inpainting with a base model is not great"); + masked_image = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, width / 8, height / 8, 1, 1); + for (int ix = 0; ix < masked_image->ne[0]; ix++) { + for (int iy = 0; iy < masked_image->ne[1]; iy++) { + int mx = ix * 8; + int my = iy * 8; + float m = ggml_tensor_get_f32(mask_img, mx, my); + ggml_tensor_set_f32(masked_image, m, ix, iy); + } + } + } + + ggml_tensor* init_latent = NULL; + if (!sd_ctx->sd->use_tiny_autoencoder) { + ggml_tensor* moments = sd_ctx->sd->encode_first_stage(work_ctx, init_img); + init_latent = sd_ctx->sd->get_first_stage_encoding(work_ctx, moments); + } else { + init_latent = sd_ctx->sd->encode_first_stage(work_ctx, init_img); } - LOG_INFO("img2img %dx%d", width, height); - std::vector sigmas = sd->denoiser.get_sigmas(sample_steps); - size_t t_enc = static_cast(sample_steps * strength); + print_ggml_tensor(init_latent, true); + size_t t1 = ggml_time_ms(); + LOG_INFO("encode_first_stage completed, taking %.2fs", (t1 - t0) * 1.0f / 1000); + + std::vector sigmas = sd_ctx->sd->denoiser->get_sigmas(sample_steps); + size_t t_enc = static_cast(sample_steps * strength); + if (t_enc == sample_steps) + t_enc--; LOG_INFO("target t_enc is %zu steps", t_enc); std::vector sigma_sched; sigma_sched.assign(sigmas.begin() + sample_steps - t_enc - 1, sigmas.end()); + sd_image_t* result_images = generate_image(sd_ctx, + work_ctx, + init_latent, + prompt_c_str, + negative_prompt_c_str, + clip_skip, + cfg_scale, + guidance, + eta, + width, + height, 
+ sample_method, + sigma_sched, + seed, + batch_count, + control_cond, + control_strength, + style_ratio, + normalize_input, + input_id_images_path_c_str, + {}, + skip_layers_vec, + slg_scale, + skip_layer_start, + skip_layer_end, + masked_image); + + size_t t2 = ggml_time_ms(); + + LOG_INFO("img2img completed in %.2fs", (t2 - t0) * 1.0f / 1000); + + return result_images; +} + +SD_API sd_image_t* img2vid(sd_ctx_t* sd_ctx, + sd_image_t init_image, + int width, + int height, + int video_frames, + int motion_bucket_id, + int fps, + float augmentation_level, + float min_cfg, + float cfg_scale, + enum sample_method_t sample_method, + int sample_steps, + float strength, + int64_t seed) { + if (sd_ctx == NULL) { + return NULL; + } + + LOG_INFO("img2vid %dx%d", width, height); + + std::vector sigmas = sd_ctx->sd->denoiser->get_sigmas(sample_steps); + struct ggml_init_params params; - params.mem_size = static_cast(10 * 1024) * 1024; // 10M - params.mem_size += width * height * 3 * sizeof(float) * 2; + params.mem_size = static_cast(10 * 1024) * 1024; // 10 MB + params.mem_size += width * height * 3 * sizeof(float) * video_frames; params.mem_buffer = NULL; - params.no_alloc = false; - params.dynamic = false; - struct ggml_context* ctx = ggml_init(params); - if (!ctx) { + params.no_alloc = false; + // LOG_DEBUG("mem_size %u ", params.mem_size); + + // draft context + struct ggml_context* work_ctx = ggml_init(params); + if (!work_ctx) { LOG_ERROR("ggml_init() failed"); - return result; + return NULL; } if (seed < 0) { seed = (int)time(NULL); } - set_random_seed(seed); - ggml_tensor* init_img = ggml_new_tensor_4d(ctx, GGML_TYPE_F32, width, height, 3, 1); - image_vec_to_ggml(init_img_vec, init_img); + sd_ctx->sd->rng->manual_seed(seed); int64_t t0 = ggml_time_ms(); - ggml_tensor* moments = sd->encode_first_stage(ctx, init_img); - ggml_tensor* init_latent = sd->get_first_stage_encoding(ctx, moments); - // print_ggml_tensor(init_latent); + + SDCondition cond = 
sd_ctx->sd->get_svd_condition(work_ctx, + init_image, + width, + height, + fps, + motion_bucket_id, + augmentation_level); + + auto uc_crossattn = ggml_dup_tensor(work_ctx, cond.c_crossattn); + ggml_set_f32(uc_crossattn, 0.f); + + auto uc_concat = ggml_dup_tensor(work_ctx, cond.c_concat); + ggml_set_f32(uc_concat, 0.f); + + auto uc_vector = ggml_dup_tensor(work_ctx, cond.c_vector); + + SDCondition uncond = SDCondition(uc_crossattn, uc_vector, uc_concat); + int64_t t1 = ggml_time_ms(); - LOG_INFO("encode_first_stage completed, taking %.2fs", (t1 - t0) * 1.0f / 1000); + LOG_INFO("get_learned_condition completed, taking %" PRId64 " ms", t1 - t0); + if (sd_ctx->sd->free_params_immediately) { + sd_ctx->sd->clip_vision->free_params_buffer(); + } + + sd_ctx->sd->rng->manual_seed(seed); + int C = 4; + int W = width / 8; + int H = height / 8; + struct ggml_tensor* x_t = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, W, H, C, video_frames); + ggml_set_f32(x_t, 0.f); + + struct ggml_tensor* noise = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, W, H, C, video_frames); + ggml_tensor_set_f32_randn(noise, sd_ctx->sd->rng); + + LOG_INFO("sampling using %s method", sampling_methods_str[sample_method]); + struct ggml_tensor* x_0 = sd_ctx->sd->sample(work_ctx, + x_t, + noise, + cond, + uncond, + {}, + 0.f, + min_cfg, + cfg_scale, + 0.f, + 0.f, + sample_method, + sigmas, + -1, + SDCondition(NULL, NULL, NULL)); + + int64_t t2 = ggml_time_ms(); + LOG_INFO("sampling completed, taking %.2fs", (t2 - t1) * 1.0f / 1000); + if (sd_ctx->sd->free_params_immediately) { + sd_ctx->sd->diffusion_model->free_params_buffer(); + } - ggml_reset_curr_max_dynamic_size(); // reset counter + struct ggml_tensor* img = sd_ctx->sd->decode_first_stage(work_ctx, x_0); + if (sd_ctx->sd->free_params_immediately) { + sd_ctx->sd->first_stage_model->free_params_buffer(); + } + if (img == NULL) { + ggml_free(work_ctx); + return NULL; + } - ggml_tensor* c = sd->get_learned_condition(ctx, prompt); - struct ggml_tensor* uc = 
NULL; - if (cfg_scale != 1.0) { - uc = sd->get_learned_condition(ctx, negative_prompt); + sd_image_t* result_images = (sd_image_t*)calloc(video_frames, sizeof(sd_image_t)); + if (result_images == NULL) { + ggml_free(work_ctx); + return NULL; } - int64_t t2 = ggml_time_ms(); - LOG_INFO("get_learned_condition completed, taking %.2fs", (t2 - t1) * 1.0f / 1000); - if (sd->free_params_immediately) { - sd->curr_params_mem_size -= ggml_used_mem(sd->clip_params_ctx); - ggml_free(sd->clip_params_ctx); - sd->clip_params_ctx = NULL; + + for (size_t i = 0; i < video_frames; i++) { + auto img_i = ggml_view_3d(work_ctx, img, img->ne[0], img->ne[1], img->ne[2], img->nb[1], img->nb[2], img->nb[3] * i); + + result_images[i].width = width; + result_images[i].height = height; + result_images[i].channel = 3; + result_images[i].data = sd_tensor_to_image(img_i); } + ggml_free(work_ctx); - LOG_INFO("start sampling"); - struct ggml_tensor* x_0 = sd->sample(ctx, init_latent, c, uc, cfg_scale, sample_method, sigma_sched); - // struct ggml_tensor *x_0 = load_tensor_from_file(ctx, "samples_ddim.bin"); - // print_ggml_tensor(x_0); int64_t t3 = ggml_time_ms(); - LOG_INFO("sampling completed, taking %.2fs", (t3 - t2) * 1.0f / 1000); - if (sd->free_params_immediately) { - sd->curr_params_mem_size -= ggml_used_mem(sd->unet_params_ctx); - ggml_free(sd->unet_params_ctx); - sd->unet_params_ctx = NULL; + + LOG_INFO("img2vid completed in %.2fs", (t3 - t0) * 1.0f / 1000); + + return result_images; +} + +sd_image_t* edit(sd_ctx_t* sd_ctx, + sd_image_t* ref_images, + int ref_images_count, + const char* prompt_c_str, + const char* negative_prompt_c_str, + int clip_skip, + float cfg_scale, + float guidance, + float eta, + int width, + int height, + sample_method_t sample_method, + int sample_steps, + float strength, + int64_t seed, + int batch_count, + const sd_image_t* control_cond, + float control_strength, + float style_ratio, + bool normalize_input, + int* skip_layers = NULL, + size_t skip_layers_count 
= 0, + float slg_scale = 0, + float skip_layer_start = 0.01, + float skip_layer_end = 0.2) { + std::vector skip_layers_vec(skip_layers, skip_layers + skip_layers_count); + LOG_DEBUG("edit %dx%d", width, height); + if (sd_ctx == NULL) { + return NULL; + } + if (ref_images_count <= 0) { + LOG_ERROR("ref images count should > 0"); + return NULL; } - struct ggml_tensor* img = sd->decode_first_stage(ctx, x_0); - if (img != NULL) { - result = ggml_to_image_vec(img); + struct ggml_init_params params; + params.mem_size = static_cast(30 * 1024 * 1024); // 10 MB + params.mem_size += width * height * 3 * sizeof(float) * 3 * ref_images_count; + params.mem_size *= batch_count; + params.mem_buffer = NULL; + params.no_alloc = false; + // LOG_DEBUG("mem_size %u ", params.mem_size); + + struct ggml_context* work_ctx = ggml_init(params); + if (!work_ctx) { + LOG_ERROR("ggml_init() failed"); + return NULL; } - int64_t t4 = ggml_time_ms(); - LOG_INFO("decode_first_stage completed, taking %.2fs", (t4 - t3) * 1.0f / 1000); - if (sd->free_params_immediately) { - sd->curr_params_mem_size -= ggml_used_mem(sd->vae_params_ctx); - ggml_free(sd->vae_params_ctx); - sd->vae_params_ctx = NULL; + if (seed < 0) { + srand((int)time(NULL)); + seed = rand(); } + sd_ctx->sd->rng->manual_seed(seed); - LOG_INFO( - "img2img completed in %.2fs, use %.2fMB of memory: peak params memory %.2fMB, " - "peak runtime memory %.2fMB", - (t4 - t0) * 1.0f / 1000, - sd->max_mem_size * 1.0f / 1024 / 1024, - sd->max_params_mem_size * 1.0f / 1024 / 1024, - sd->max_rt_mem_size * 1.0f / 1024 / 1024); + size_t t0 = ggml_time_ms(); - ggml_free(ctx); + std::vector ref_latents; + for (int i = 0; i < ref_images_count; i++) { + ggml_tensor* img = ggml_new_tensor_4d(work_ctx, GGML_TYPE_F32, ref_images[i].width, ref_images[i].height, 3, 1); + sd_image_to_tensor(ref_images[i].data, img); - return result; -} + ggml_tensor* latent = NULL; + if (!sd_ctx->sd->use_tiny_autoencoder) { + ggml_tensor* moments = 
sd_ctx->sd->encode_first_stage(work_ctx, img); + latent = sd_ctx->sd->get_first_stage_encoding(work_ctx, moments); + } else { + latent = sd_ctx->sd->encode_first_stage(work_ctx, img); + } + ref_latents.push_back(latent); + } + + size_t t1 = ggml_time_ms(); + LOG_INFO("encode_first_stage completed, taking %.2fs", (t1 - t0) * 1.0f / 1000); + + std::vector sigmas = sd_ctx->sd->denoiser->get_sigmas(sample_steps); + + ggml_tensor* init_latent = generate_init_latent(sd_ctx, work_ctx, width, height); + + sd_image_t* result_images = generate_image(sd_ctx, + work_ctx, + init_latent, + prompt_c_str, + negative_prompt_c_str, + clip_skip, + cfg_scale, + guidance, + eta, + width, + height, + sample_method, + sigmas, + seed, + batch_count, + control_cond, + control_strength, + style_ratio, + normalize_input, + "", + ref_latents, + skip_layers_vec, + slg_scale, + skip_layer_start, + skip_layer_end, + NULL); + + size_t t2 = ggml_time_ms(); + + LOG_INFO("edit completed in %.2fs", (t2 - t0) * 1.0f / 1000); + + return result_images; +} \ No newline at end of file diff --git a/stable-diffusion.h b/stable-diffusion.h index 730a65538..212e1c918 100644 --- a/stable-diffusion.h +++ b/stable-diffusion.h @@ -1,54 +1,275 @@ #ifndef __STABLE_DIFFUSION_H__ #define __STABLE_DIFFUSION_H__ -#include -#include - -enum class SDLogLevel { - DEBUG, - INFO, - WARN, - ERROR +#if defined(_WIN32) || defined(__CYGWIN__) +#ifndef SD_BUILD_SHARED_LIB +#define SD_API +#else +#ifdef SD_BUILD_DLL +#define SD_API __declspec(dllexport) +#else +#define SD_API __declspec(dllimport) +#endif +#endif +#else +#if __GNUC__ >= 4 +#define SD_API __attribute__((visibility("default"))) +#else +#define SD_API +#endif +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +#include +#include +#include +#include + +enum rng_type_t { + STD_DEFAULT_RNG, + CUDA_RNG +}; + +enum sample_method_t { + EULER_A, + EULER, + HEUN, + DPM2, + DPMPP2S_A, + DPMPP2M, + DPMPP2Mv2, + IPNDM, + IPNDM_V, + LCM, + DDIM_TRAILING, + TCD, + 
N_SAMPLE_METHODS +}; + +enum schedule_t { + DEFAULT, + DISCRETE, + KARRAS, + EXPONENTIAL, + AYS, + GITS, + N_SCHEDULES }; -enum SampleMethod { - EULAR_A, +// same as enum ggml_type +enum sd_type_t { + SD_TYPE_F32 = 0, + SD_TYPE_F16 = 1, + SD_TYPE_Q4_0 = 2, + SD_TYPE_Q4_1 = 3, + // SD_TYPE_Q4_2 = 4, support has been removed + // SD_TYPE_Q4_3 = 5, support has been removed + SD_TYPE_Q5_0 = 6, + SD_TYPE_Q5_1 = 7, + SD_TYPE_Q8_0 = 8, + SD_TYPE_Q8_1 = 9, + SD_TYPE_Q2_K = 10, + SD_TYPE_Q3_K = 11, + SD_TYPE_Q4_K = 12, + SD_TYPE_Q5_K = 13, + SD_TYPE_Q6_K = 14, + SD_TYPE_Q8_K = 15, + SD_TYPE_IQ2_XXS = 16, + SD_TYPE_IQ2_XS = 17, + SD_TYPE_IQ3_XXS = 18, + SD_TYPE_IQ1_S = 19, + SD_TYPE_IQ4_NL = 20, + SD_TYPE_IQ3_S = 21, + SD_TYPE_IQ2_S = 22, + SD_TYPE_IQ4_XS = 23, + SD_TYPE_I8 = 24, + SD_TYPE_I16 = 25, + SD_TYPE_I32 = 26, + SD_TYPE_I64 = 27, + SD_TYPE_F64 = 28, + SD_TYPE_IQ1_M = 29, + SD_TYPE_BF16 = 30, + // SD_TYPE_Q4_0_4_4 = 31, support has been removed from gguf files + // SD_TYPE_Q4_0_4_8 = 32, + // SD_TYPE_Q4_0_8_8 = 33, + SD_TYPE_TQ1_0 = 34, + SD_TYPE_TQ2_0 = 35, + // SD_TYPE_IQ4_NL_4_4 = 36, + // SD_TYPE_IQ4_NL_4_8 = 37, + // SD_TYPE_IQ4_NL_8_8 = 38, + SD_TYPE_COUNT = 39, }; -class StableDiffusionGGML; - -class StableDiffusion { - private: - std::shared_ptr sd; - - public: - StableDiffusion(int n_threads = -1, - bool vae_decode_only = false, - bool free_params_immediately = false); - bool load_from_file(const std::string& file_path); - std::vector txt2img( - const std::string& prompt, - const std::string& negative_prompt, - float cfg_scale, - int width, - int height, - SampleMethod sample_method, - int sample_steps, - int seed); - std::vector img2img( - const std::vector& init_img, - const std::string& prompt, - const std::string& negative_prompt, - float cfg_scale, - int width, - int height, - SampleMethod sample_method, - int sample_steps, - float strength, - int seed); +SD_API const char* sd_type_name(enum sd_type_t type); + +enum sd_log_level_t { + SD_LOG_DEBUG, + 
SD_LOG_INFO, + SD_LOG_WARN, + SD_LOG_ERROR }; -void set_sd_log_level(SDLogLevel level); -std::string sd_get_system_info(); +typedef void (*sd_log_cb_t)(enum sd_log_level_t level, const char* text, void* data); +typedef void (*sd_progress_cb_t)(int step, int steps, float time, void* data); + +SD_API void sd_set_log_callback(sd_log_cb_t sd_log_cb, void* data); +SD_API void sd_set_progress_callback(sd_progress_cb_t cb, void* data); +SD_API int32_t get_num_physical_cores(); +SD_API const char* sd_get_system_info(); + +typedef struct { + uint32_t width; + uint32_t height; + uint32_t channel; + uint8_t* data; +} sd_image_t; + +typedef struct sd_ctx_t sd_ctx_t; + +SD_API sd_ctx_t* new_sd_ctx(const char* model_path, + const char* clip_l_path, + const char* clip_g_path, + const char* t5xxl_path, + const char* diffusion_model_path, + const char* vae_path, + const char* taesd_path, + const char* control_net_path_c_str, + const char* lora_model_dir, + const char* embed_dir_c_str, + const char* stacked_id_embed_dir_c_str, + bool vae_decode_only, + bool vae_tiling, + bool free_params_immediately, + int n_threads, + enum sd_type_t wtype, + enum rng_type_t rng_type, + enum schedule_t s, + bool keep_clip_on_cpu, + bool keep_control_net_cpu, + bool keep_vae_on_cpu, + bool diffusion_flash_attn, + bool chroma_use_dit_mask, + bool chroma_use_t5_mask, + int chroma_t5_mask_pad); + +SD_API void free_sd_ctx(sd_ctx_t* sd_ctx); + +SD_API sd_image_t* txt2img(sd_ctx_t* sd_ctx, + const char* prompt, + const char* negative_prompt, + int clip_skip, + float cfg_scale, + float guidance, + float eta, + int width, + int height, + enum sample_method_t sample_method, + int sample_steps, + int64_t seed, + int batch_count, + const sd_image_t* control_cond, + float control_strength, + float style_strength, + bool normalize_input, + const char* input_id_images_path, + int* skip_layers, + size_t skip_layers_count, + float slg_scale, + float skip_layer_start, + float skip_layer_end); + +SD_API sd_image_t* 
img2img(sd_ctx_t* sd_ctx, + sd_image_t init_image, + sd_image_t mask_image, + const char* prompt, + const char* negative_prompt, + int clip_skip, + float cfg_scale, + float guidance, + float eta, + int width, + int height, + enum sample_method_t sample_method, + int sample_steps, + float strength, + int64_t seed, + int batch_count, + const sd_image_t* control_cond, + float control_strength, + float style_strength, + bool normalize_input, + const char* input_id_images_path, + int* skip_layers, + size_t skip_layers_count, + float slg_scale, + float skip_layer_start, + float skip_layer_end); + +SD_API sd_image_t* img2vid(sd_ctx_t* sd_ctx, + sd_image_t init_image, + int width, + int height, + int video_frames, + int motion_bucket_id, + int fps, + float augmentation_level, + float min_cfg, + float cfg_scale, + enum sample_method_t sample_method, + int sample_steps, + float strength, + int64_t seed); + +SD_API sd_image_t* edit(sd_ctx_t* sd_ctx, + sd_image_t* ref_images, + int ref_images_count, + const char* prompt, + const char* negative_prompt, + int clip_skip, + float cfg_scale, + float guidance, + float eta, + int width, + int height, + enum sample_method_t sample_method, + int sample_steps, + float strength, + int64_t seed, + int batch_count, + const sd_image_t* control_cond, + float control_strength, + float style_strength, + bool normalize_input, + int* skip_layers, + size_t skip_layers_count, + float slg_scale, + float skip_layer_start, + float skip_layer_end); + +typedef struct upscaler_ctx_t upscaler_ctx_t; + +SD_API upscaler_ctx_t* new_upscaler_ctx(const char* esrgan_path, + int n_threads); +SD_API void free_upscaler_ctx(upscaler_ctx_t* upscaler_ctx); + +SD_API sd_image_t upscale(upscaler_ctx_t* upscaler_ctx, sd_image_t input_image, uint32_t upscale_factor); + +SD_API bool convert(const char* input_path, const char* vae_path, const char* output_path, enum sd_type_t output_type, const char* tensor_type_rules); + +SD_API uint8_t* preprocess_canny(uint8_t* img, + 
// Port from: https://github.com/google/sentencepiece/blob/master/src/unigram_model.h
// and https://github.com/google/sentencepiece/blob/master/src/unigram_model.h.
// Original License: https://github.com/google/sentencepiece/blob/master/LICENSE
//
// Since tokenization is not the bottleneck in SD, performance was not a major
// consideration during the migration.

// SentencePiece "metaspace" pre-tokenizer: rewrites every space in the input
// as the replacement marker (typically U+2581 "▁"), optionally prefixing one
// marker so the first word is treated like any other word boundary.
class MetaspacePreTokenizer {
private:
    std::string replacement;
    bool add_prefix_space;

public:
    // Pass-by-value + move: callers with temporaries avoid an extra copy
    // (the original took `const std::string` by value, forcing a copy).
    MetaspacePreTokenizer(std::string replacement = " ", bool add_prefix_space = true)
        : replacement(std::move(replacement)), add_prefix_space(add_prefix_space) {}

    // Splits `input` on single spaces and rejoins the pieces with the
    // replacement marker. Consecutive spaces yield empty pieces and hence
    // consecutive markers (upstream Normalize() collapses them beforehand).
    std::string tokenize(const std::string& input) const {
        std::string tokens;
        std::stringstream ss(input);

        if (add_prefix_space) {
            tokens += replacement;
        }

        std::string token;
        bool firstToken = true;
        while (std::getline(ss, token, ' ')) {
            if (!firstToken)
                tokens += replacement + token;
            else
                tokens += token;

            firstToken = false;
        }

        return tokens;
    }
};
trie_; + + // Maximum size of the return value of Trie, which corresponds + // to the maximum size of shared common prefix in the sentence pieces. + int trie_results_size_; + // unknown id. + int unk_id_ = 2; + std::string eos_token_ = ""; + int eos_id_ = 1; + int pad_id_ = 0; + // status. + Status status_ = OK; + + float kUnkPenalty = 10.0; + + std::string replacement; + bool add_prefix_space = true; + + void InitializePieces(const std::string& json_str) { + nlohmann::json data; + + try { + data = nlohmann::json::parse(json_str); + } catch (const nlohmann::json::parse_error& e) { + status_ = INVLIAD_JSON; + return; + } + if (!data.contains("model")) { + status_ = INVLIAD_JSON; + return; + } + nlohmann::json model = data["model"]; + if (!model.contains("vocab")) { + status_ = INVLIAD_JSON; + return; + } + if (model.contains("unk_id")) { + unk_id_ = model["unk_id"]; + } + + replacement = data["pre_tokenizer"]["replacement"]; + add_prefix_space = data["pre_tokenizer"]["add_prefix_space"]; + + pre_tokenizer = MetaspacePreTokenizer(replacement, add_prefix_space); + + for (const auto& item : model["vocab"]) { + if (item.size() != 2 || !item[0].is_string() || !item[1].is_number_float()) { + status_ = INVLIAD_JSON; + return; + } + std::string piece = item[0]; + float score = item[1]; + piece_score_pairs.emplace_back(piece, score); + } + } + + // Builds a Trie index. + void BuildTrie(std::vector>* pieces) { + if (status_ != OK) + return; + + if (pieces->empty()) { + status_ = NO_PIECES_LOADED; + return; + } + + // sort by sentencepiece since DoubleArray::build() + // only accepts sorted strings. + sort(pieces->begin(), pieces->end()); + + // Makes key/value set for DoubleArrayTrie. + std::vector key(pieces->size()); + std::vector value(pieces->size()); + for (size_t i = 0; i < pieces->size(); ++i) { + key[i] = (*pieces)[i].first.data(); // sorted piece. 
+ value[i] = (*pieces)[i].second; // vocab_id + } + + trie_ = std::unique_ptr(new Darts::DoubleArray()); + if (trie_->build(key.size(), const_cast(&key[0]), nullptr, + &value[0]) != 0) { + status_ = BUILD_DOUBLE_ARRAY_FAILED; + return; + } + + // Computes the maximum number of shared prefixes in the trie. + const int kMaxTrieResultsSize = 1024; + std::vector results( + kMaxTrieResultsSize); + trie_results_size_ = 0; + for (const auto& p : *pieces) { + const int num_nodes = trie_->commonPrefixSearch( + p.first.data(), results.data(), results.size(), p.first.size()); + trie_results_size_ = std::max(trie_results_size_, num_nodes); + } + + if (trie_results_size_ == 0) + status_ = NO_ENTRY_FOUND; + } + + // Non-virtual (inlined) implementation for faster execution. + inline float GetScoreInlined(int id) const { + return piece_score_pairs[id].second; + } + + inline bool IsUnusedInlined(int id) const { + return false; // TODO + } + + inline bool IsUserDefinedInlined(int id) const { + return false; // TODO + } + + inline size_t OneCharLen(const char* src) const { + return "\1\1\1\1\1\1\1\1\1\1\1\1\2\2\3\4"[(*src & 0xFF) >> 4]; + } + + // The optimized Viterbi encode. + // Main differences from the original function: + // 1. Memorizes the best path at each postion so far, + // 2. No need to store the Lattice nodes, + // 3. Works in utf-8 directly, + // 4. Defines a new struct with fewer fields than Lattice, + // 5. Does not depend on `class Lattice` nor call `SetSentence()`, + // `PopulateNodes()`, or `Viterbi()`. It does everything in one function. + // For detailed explanations please see the comments inside the function body. + EncodeResult EncodeOptimized(const std::string& normalized) const { + // An optimized Viterbi algorithm for unigram language models. Benchmarking + // results show that it generates almost identical outputs and achieves 2.1x + // speedup on average for 102 languages compared to the original + // implementation. 
It's based on the following three ideas: + // + // 1. Because it uses the *unigram* model: + // best_score(x1, x2, …, xt) = best_score(x1, x2, …, x{t-1}) + score(xt) + // Deciding the best path (and score) can be decoupled into two isolated + // terms: (a) the best path ended before the last token `best_score(x1, x2, …, + // x{t-1})`, and (b) the last token and its `score(xt)`. The two terms are + // not related to each other at all. + // + // Therefore, we can compute once and store the *best_path ending at + // each character position*. In this way, when we know best_path_ends_at[M], + // we can reuse it to compute all the best_path_ends_at_[...] where the last + // token starts at the same character position M. + // + // This improves the time complexity from O(n*k*k) to O(n*k) because it + // eliminates the extra loop of recomputing the best path ending at the same + // position, where n is the input length and k is the maximum number of tokens + // that can be recognized starting at each position. + // + // 2. Again, because it uses the *unigram* model, we don’t need to actually + // store the lattice nodes. We still recognize all the tokens and lattice + // nodes from the input, but along identifying them, we use and discard them + // on the fly. There is no need to actually store them for best path Viterbi + // decoding. The only thing we need to store is the best_path ending at + // each character position. + // + // This improvement reduces the things needed to store in memory from O(n*k) + // to O(n), where n is the input length and k is the maximum number of tokens + // that can be recognized starting at each position. + // + // It also avoids the need of dynamic-size lattice node pool, because the + // number of things to store is fixed as n. + // + // 3. SentencePiece is designed to work with unicode, taking utf-8 encoding + // inputs. In the original implementation, the lattice positions are based on + // unicode positions. 
A mapping from unicode position to the utf-8 position is + // maintained to recover the utf-8 string piece. + // + // We found that it is sufficient and beneficial to directly work with utf-8 + // positions: + // + // Firstly, it saves the conversion and mapping between unicode positions and + // utf-8 positions. + // + // Secondly, it reduces the number of fields we need to maintain in the + // node/path structure. Specifically, there are 8 fields defined in + // `Lattice::Node` used by the original encoder, but here in the optimized + // encoder we only need to define 3 fields in `BestPathNode`. + + if (status() != OK || normalized.empty()) { + return {}; + } + // Represents the last node of the best path. + struct BestPathNode { + int id = -1; // The vocab id. (maybe -1 for UNK) + float best_path_score = + 0; // The total score of the best path ending at this node. + int starts_at = + -1; // The starting position (in utf-8) of this node. The entire best + // path can be constructed by backtracking along this link. + }; + const int size = normalized.size(); + const float unk_score = min_score() - kUnkPenalty; + // The ends are exclusive. + std::vector best_path_ends_at(size + 1); + // Generate lattice on-the-fly (not stored) and update best_path_ends_at. + int starts_at = 0; + while (starts_at < size) { + std::size_t node_pos = 0; + std::size_t key_pos = starts_at; + const auto best_path_score_till_here = + best_path_ends_at[starts_at].best_path_score; + bool has_single_node = false; + const int mblen = + std::min(OneCharLen(normalized.data() + starts_at), + size - starts_at); + while (key_pos < size) { + const int ret = + trie_->traverse(normalized.data(), node_pos, key_pos, key_pos + 1); + if (ret == -2) + break; + if (ret >= 0) { + if (IsUnusedInlined(ret)) + continue; + // Update the best path node. 
+ auto& target_node = best_path_ends_at[key_pos]; + const auto length = (key_pos - starts_at); + // User defined symbol receives extra bonus to always be selected. + const auto score = IsUserDefinedInlined(ret) + ? (length * max_score_ - 0.1) + : GetScoreInlined(ret); + const auto candidate_best_path_score = + score + best_path_score_till_here; + if (target_node.starts_at == -1 || + candidate_best_path_score > target_node.best_path_score) { + target_node.best_path_score = candidate_best_path_score; + target_node.starts_at = starts_at; + target_node.id = ret; + } + if (!has_single_node && length == mblen) { + has_single_node = true; + } + } + } + if (!has_single_node) { + auto& target_node = best_path_ends_at[starts_at + mblen]; + const auto candidate_best_path_score = + unk_score + best_path_score_till_here; + if (target_node.starts_at == -1 || + candidate_best_path_score > target_node.best_path_score) { + target_node.best_path_score = candidate_best_path_score; + target_node.starts_at = starts_at; + target_node.id = unk_id_; + } + } + // Move by one unicode character. + starts_at += mblen; + } + // Backtrack to identify the best path. 
+ EncodeResult results; + int ends_at = size; + while (ends_at > 0) { + const auto& node = best_path_ends_at[ends_at]; + results.emplace_back( + normalized.substr(node.starts_at, ends_at - node.starts_at), node.id); + ends_at = node.starts_at; + } + std::reverse(results.begin(), results.end()); + return results; + } + +public: + explicit T5UniGramTokenizer(const std::string& json_str = "") { + if (json_str.size() != 0) { + InitializePieces(json_str); + } else { + InitializePieces(ModelLoader::load_t5_tokenizer_json()); + } + + min_score_ = FLT_MAX; + max_score_ = FLT_MIN; + + std::vector> pieces; + for (int i = 0; i < piece_score_pairs.size(); i++) { + const auto& sp = piece_score_pairs[i]; + + min_score_ = std::min(min_score_, sp.second); + max_score_ = std::max(max_score_, sp.second); + + pieces.emplace_back(sp.first, i); + } + + BuildTrie(&pieces); + } + ~T5UniGramTokenizer(){}; + + std::string Normalize(const std::string& input) const { + // Ref: https://github.com/huggingface/tokenizers/blob/1ff56c0c70b045f0cd82da1af9ac08cd4c7a6f9f/bindings/python/py_src/tokenizers/implementations/sentencepiece_unigram.py#L29 + // TODO: nmt-nfkc + std::string normalized = std::regex_replace(input, std::regex(" {2,}"), " "); + return normalized; + } + + std::vector Encode(const std::string& input, bool append_eos_if_not_present = true) const { + std::string normalized = Normalize(input); + normalized = pre_tokenizer.tokenize(normalized); + EncodeResult result = EncodeOptimized(normalized); + if (result.size() > 0 && append_eos_if_not_present) { + auto item = result[result.size() - 1]; + if (item.first != eos_token_) { + result.emplace_back(eos_token_, eos_id_); + } + } + std::vector tokens; + for (auto item : result) { + tokens.push_back(item.second); + } + return tokens; + } + + void pad_tokens(std::vector& tokens, + std::vector& weights, + std::vector* attention_mask, + size_t max_length = 0, + bool padding = false) { + if (max_length > 0 && padding) { + size_t orig_token_num 
= tokens.size() - 1; + size_t n = std::ceil(orig_token_num * 1.0 / (max_length - 1)); + if (n == 0) { + n = 1; + } + size_t length = max_length * n; + LOG_DEBUG("token length: %llu", length); + std::vector new_tokens; + std::vector new_weights; + std::vector new_attention_mask; + int token_idx = 0; + for (int i = 0; i < length; i++) { + if (token_idx >= orig_token_num) { + break; + } + if (attention_mask != nullptr) { + new_attention_mask.push_back(0.0); + } + if (i % max_length == max_length - 1) { + new_tokens.push_back(eos_id_); + new_weights.push_back(1.0); + } else { + new_tokens.push_back(tokens[token_idx]); + new_weights.push_back(weights[token_idx]); + token_idx++; + } + } + + new_tokens.push_back(eos_id_); + new_weights.push_back(1.0); + if (attention_mask != nullptr) { + new_attention_mask.push_back(0.0); + } + + tokens = new_tokens; + weights = new_weights; + if (attention_mask != nullptr) { + *attention_mask = new_attention_mask; + } + + if (padding) { + int pad_token_id = pad_id_; + tokens.insert(tokens.end(), length - tokens.size(), pad_token_id); + weights.insert(weights.end(), length - weights.size(), 1.0); + if (attention_mask != nullptr) { + // maybe keep some padding tokens unmasked? + attention_mask->insert(attention_mask->end(), length - attention_mask->size(), -HUGE_VALF); + } + } + } + } + + // Returns the minimum score in sentence pieces. + // min_score() - 10 is used for the cost of unknown sentence. + float min_score() const { return min_score_; } + + // Returns the maximum score in sentence pieces. + // max_score() is used for the cost of user defined symbols. 
+ float max_score() const { return max_score_; } + + Status status() const { return status_; } +}; + +class T5LayerNorm : public UnaryBlock { +protected: + int64_t hidden_size; + float eps; + + void init_params(struct ggml_context* ctx, std::map& tensor_types, const std::string prefix = "") { + enum ggml_type wtype = GGML_TYPE_F32; //(tensor_types.find(prefix + "weight") != tensor_types.end()) ? tensor_types[prefix + "weight"] : GGML_TYPE_F32; + params["weight"] = ggml_new_tensor_1d(ctx, wtype, hidden_size); + } + +public: + T5LayerNorm(int64_t hidden_size, + float eps = 1e-06f) + : hidden_size(hidden_size), + eps(eps) {} + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + struct ggml_tensor* w = params["weight"]; + x = ggml_rms_norm(ctx, x, eps); + x = ggml_mul(ctx, x, w); + return x; + } +}; + +struct T5DenseActDense : public UnaryBlock { +public: + T5DenseActDense(int64_t model_dim, int64_t ff_dim) { + blocks["wi"] = std::shared_ptr(new Linear(model_dim, ff_dim, false)); + blocks["wo"] = std::shared_ptr(new Linear(ff_dim, model_dim, false)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [N, n_token, model_dim] + auto wi = std::dynamic_pointer_cast(blocks["wi"]); + auto wo = std::dynamic_pointer_cast(blocks["wo"]); + + x = wi->forward(ctx, x); + x = ggml_relu_inplace(ctx, x); + x = wo->forward(ctx, x); + return x; + } +}; + +struct T5DenseGatedActDense : public UnaryBlock { +public: + T5DenseGatedActDense(int64_t model_dim, int64_t ff_dim) { + blocks["wi_0"] = std::shared_ptr(new Linear(model_dim, ff_dim, false)); + blocks["wi_1"] = std::shared_ptr(new Linear(model_dim, ff_dim, false)); + blocks["wo"] = std::shared_ptr(new Linear(ff_dim, model_dim, false)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [N, n_token, model_dim] + auto wi_0 = std::dynamic_pointer_cast(blocks["wi_0"]); + auto wi_1 = 
std::dynamic_pointer_cast(blocks["wi_1"]); + auto wo = std::dynamic_pointer_cast(blocks["wo"]); + + auto hidden_gelu = ggml_gelu_inplace(ctx, wi_0->forward(ctx, x)); + auto hidden_linear = wi_1->forward(ctx, x); + x = ggml_mul_inplace(ctx, hidden_gelu, hidden_linear); + x = wo->forward(ctx, x); + return x; + } +}; + +struct T5LayerFF : public UnaryBlock { +public: + T5LayerFF(int64_t model_dim, int64_t ff_dim) { + blocks["DenseReluDense"] = std::shared_ptr(new T5DenseGatedActDense(model_dim, ff_dim)); + blocks["layer_norm"] = std::shared_ptr(new T5LayerNorm(model_dim)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [N, n_token, model_dim] + auto DenseReluDense = std::dynamic_pointer_cast(blocks["DenseReluDense"]); + auto layer_norm = std::dynamic_pointer_cast(blocks["layer_norm"]); + + auto forwarded_states = layer_norm->forward(ctx, x); + forwarded_states = DenseReluDense->forward(ctx, forwarded_states); + x = ggml_add_inplace(ctx, forwarded_states, x); + return x; + } +}; + +class T5Attention : public GGMLBlock { +protected: + int64_t model_dim; + int64_t inner_dim; + int64_t num_heads; + bool using_relative_attention_bias; + int64_t relative_attention_num_buckets = 32; + int64_t relative_attention_max_distance = 128; + +public: + T5Attention(int64_t model_dim, + int64_t inner_dim, + int64_t num_heads, + bool using_relative_attention_bias = false) + : model_dim(model_dim), + inner_dim(inner_dim), + num_heads(num_heads), + using_relative_attention_bias(using_relative_attention_bias) { + blocks["q"] = std::shared_ptr(new Linear(model_dim, inner_dim, false)); + blocks["k"] = std::shared_ptr(new Linear(model_dim, inner_dim, false)); + blocks["v"] = std::shared_ptr(new Linear(model_dim, inner_dim, false)); + blocks["o"] = std::shared_ptr(new Linear(inner_dim, model_dim, false)); + if (using_relative_attention_bias) { + blocks["relative_attention_bias"] = std::shared_ptr(new Embedding(relative_attention_num_buckets, 
num_heads)); + } + } + + struct ggml_tensor* compute_bias(struct ggml_context* ctx, + struct ggml_tensor* relative_position_bucket) { + auto relative_attention_bias = std::dynamic_pointer_cast(blocks["relative_attention_bias"]); + + auto values = relative_attention_bias->forward(ctx, relative_position_bucket); // shape (query_length, key_length, num_heads) + values = ggml_cont(ctx, ggml_permute(ctx, values, 2, 0, 1, 3)); // shape (1, num_heads, query_length, key_length) + return values; + } + + // x: [N, n_token, model_dim] + std::pair forward(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* past_bias = NULL, + struct ggml_tensor* mask = NULL, + struct ggml_tensor* relative_position_bucket = NULL) { + auto q_proj = std::dynamic_pointer_cast(blocks["q"]); + auto k_proj = std::dynamic_pointer_cast(blocks["k"]); + auto v_proj = std::dynamic_pointer_cast(blocks["v"]); + auto out_proj = std::dynamic_pointer_cast(blocks["o"]); + + int64_t n_head = num_heads; + int64_t d_head = inner_dim / n_head; + + auto q = q_proj->forward(ctx, x); + auto k = k_proj->forward(ctx, x); + auto v = v_proj->forward(ctx, x); + + if (using_relative_attention_bias && relative_position_bucket != NULL) { + past_bias = compute_bias(ctx, relative_position_bucket); + } + if (past_bias != NULL) { + if (mask != NULL) { + mask = ggml_repeat(ctx, mask, past_bias); + mask = ggml_add(ctx, mask, past_bias); + } else { + mask = past_bias; + } + } + + k = ggml_scale_inplace(ctx, k, sqrt(d_head)); + + x = ggml_nn_attention_ext(ctx, q, k, v, num_heads, mask); // [N, n_token, d_head * n_head] + + x = out_proj->forward(ctx, x); // [N, n_token, model_dim] + return {x, past_bias}; + } +}; + +struct T5LayerSelfAttention : public GGMLBlock { +public: + T5LayerSelfAttention(int64_t model_dim, + int64_t inner_dim, + int64_t ff_dim, + int64_t num_heads, + bool using_relative_attention_bias) { + blocks["SelfAttention"] = std::shared_ptr(new T5Attention(model_dim, inner_dim, num_heads, 
using_relative_attention_bias)); + blocks["layer_norm"] = std::shared_ptr(new T5LayerNorm(model_dim)); + } + + std::pair forward(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* past_bias = NULL, + struct ggml_tensor* mask = NULL, + struct ggml_tensor* relative_position_bucket = NULL) { + // x: [N, n_token, model_dim] + auto SelfAttention = std::dynamic_pointer_cast(blocks["SelfAttention"]); + auto layer_norm = std::dynamic_pointer_cast(blocks["layer_norm"]); + + auto normed_hidden_state = layer_norm->forward(ctx, x); + auto ret = SelfAttention->forward(ctx, normed_hidden_state, past_bias, mask, relative_position_bucket); + auto output = ret.first; + past_bias = ret.second; + + x = ggml_add_inplace(ctx, output, x); + return {x, past_bias}; + } +}; + +struct T5Block : public GGMLBlock { +public: + T5Block(int64_t model_dim, int64_t inner_dim, int64_t ff_dim, int64_t num_heads, bool using_relative_attention_bias) { + blocks["layer.0"] = std::shared_ptr(new T5LayerSelfAttention(model_dim, inner_dim, ff_dim, num_heads, using_relative_attention_bias)); + blocks["layer.1"] = std::shared_ptr(new T5LayerFF(model_dim, ff_dim)); + } + + std::pair forward(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* past_bias = NULL, + struct ggml_tensor* mask = NULL, + struct ggml_tensor* relative_position_bucket = NULL) { + // x: [N, n_token, model_dim] + auto layer_0 = std::dynamic_pointer_cast(blocks["layer.0"]); + auto layer_1 = std::dynamic_pointer_cast(blocks["layer.1"]); + + auto ret = layer_0->forward(ctx, x, past_bias, mask, relative_position_bucket); + x = ret.first; + past_bias = ret.second; + x = layer_1->forward(ctx, x); + return {x, past_bias}; + } +}; + +struct T5Stack : public GGMLBlock { + int64_t num_layers; + +public: + T5Stack(int64_t num_layers, + int64_t model_dim, + int64_t inner_dim, + int64_t ff_dim, + int64_t num_heads) + : num_layers(num_layers) { + for (int i = 0; i < num_layers; i++) { + blocks["block." 
+ std::to_string(i)] = std::shared_ptr(new T5Block(model_dim, inner_dim, ff_dim, num_heads, i == 0)); + } + + blocks["final_layer_norm"] = std::shared_ptr(new T5LayerNorm(model_dim)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* x, + struct ggml_tensor* past_bias = NULL, + struct ggml_tensor* attention_mask = NULL, + struct ggml_tensor* relative_position_bucket = NULL) { + // x: [N, n_token, model_dim] + for (int i = 0; i < num_layers; i++) { + auto block = std::dynamic_pointer_cast(blocks["block." + std::to_string(i)]); + + auto ret = block->forward(ctx, x, past_bias, attention_mask, relative_position_bucket); + x = ret.first; + past_bias = ret.second; + } + + auto final_layer_norm = std::dynamic_pointer_cast(blocks["final_layer_norm"]); + + x = final_layer_norm->forward(ctx, x); + return x; + } +}; + +struct T5 : public GGMLBlock { +public: + T5(int64_t num_layers, + int64_t model_dim, + int64_t ff_dim, + int64_t num_heads, + int64_t vocab_size) { + blocks["encoder"] = std::shared_ptr(new T5Stack(num_layers, model_dim, model_dim, ff_dim, num_heads)); + blocks["shared"] = std::shared_ptr(new Embedding(vocab_size, model_dim)); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* input_ids, + struct ggml_tensor* past_bias = NULL, + struct ggml_tensor* attention_mask = NULL, + struct ggml_tensor* relative_position_bucket = NULL) { + // input_ids: [N, n_token] + + auto shared = std::dynamic_pointer_cast(blocks["shared"]); + auto encoder = std::dynamic_pointer_cast(blocks["encoder"]); + + auto x = shared->forward(ctx, input_ids); + x = encoder->forward(ctx, x, past_bias, attention_mask, relative_position_bucket); + return x; + } +}; + +struct T5Runner : public GGMLRunner { + T5 model; + std::vector relative_position_bucket_vec; + + T5Runner(ggml_backend_t backend, + std::map& tensor_types, + const std::string prefix, + int64_t num_layers = 24, + int64_t model_dim = 4096, + int64_t ff_dim = 10240, + 
int64_t num_heads = 64, + int64_t vocab_size = 32128) + : GGMLRunner(backend), model(num_layers, model_dim, ff_dim, num_heads, vocab_size) { + model.init(params_ctx, tensor_types, prefix); + } + + std::string get_desc() { + return "t5"; + } + + void get_param_tensors(std::map& tensors, const std::string prefix) { + model.get_param_tensors(tensors, prefix); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, + struct ggml_tensor* input_ids, + struct ggml_tensor* relative_position_bucket, + struct ggml_tensor* attention_mask = NULL) { + size_t N = input_ids->ne[1]; + size_t n_token = input_ids->ne[0]; + + auto hidden_states = model.forward(ctx, input_ids, NULL, attention_mask, relative_position_bucket); // [N, n_token, model_dim] + return hidden_states; + } + + struct ggml_cgraph* build_graph(struct ggml_tensor* input_ids, + struct ggml_tensor* attention_mask = NULL) { + struct ggml_cgraph* gf = ggml_new_graph(compute_ctx); + + input_ids = to_backend(input_ids); + + relative_position_bucket_vec = compute_relative_position_bucket(input_ids->ne[0], input_ids->ne[0]); + + // for (int i = 0; i < relative_position_bucket_vec.size(); i++) { + // if (i % 77 == 0) { + // printf("\n"); + // } + // printf("%d ", relative_position_bucket_vec[i]); + // } + + auto relative_position_bucket = ggml_new_tensor_2d(compute_ctx, + GGML_TYPE_I32, + input_ids->ne[0], + input_ids->ne[0]); + set_backend_tensor_data(relative_position_bucket, relative_position_bucket_vec.data()); + + struct ggml_tensor* hidden_states = forward(compute_ctx, input_ids, relative_position_bucket, attention_mask); + + ggml_build_forward_expand(gf, hidden_states); + + return gf; + } + + void compute(const int n_threads, + struct ggml_tensor* input_ids, + struct ggml_tensor* attention_mask, + ggml_tensor** output, + ggml_context* output_ctx = NULL) { + auto get_graph = [&]() -> struct ggml_cgraph* { + return build_graph(input_ids, attention_mask); + }; + GGMLRunner::compute(get_graph, n_threads, true, 
output, output_ctx); + } + + static std::vector _relative_position_bucket(const std::vector& relative_position, + bool bidirectional = true, + int num_buckets = 32, + int max_distance = 128) { + std::vector relative_buckets(relative_position.size(), 0); + std::vector abs_relative_position = relative_position; + + if (bidirectional) { + num_buckets = num_buckets / 2; + for (size_t i = 0; i < relative_position.size(); ++i) { + if (relative_position[i] > 0) { + relative_buckets[i] += num_buckets; + } + abs_relative_position[i] = std::abs(relative_position[i]); + } + } else { + for (size_t i = 0; i < relative_position.size(); ++i) { + abs_relative_position[i] = std::max(-relative_position[i], 0); + } + } + + int max_exact = num_buckets / 2; + std::vector relative_position_if_large(relative_position.size(), 0); + + for (size_t i = 0; i < relative_position.size(); ++i) { + if (abs_relative_position[i] < max_exact) { + relative_buckets[i] += abs_relative_position[i]; + } else { + float log_pos = std::log(static_cast(abs_relative_position[i]) / max_exact); + float log_base = std::log(static_cast(max_distance) / max_exact); + relative_position_if_large[i] = max_exact + static_cast((log_pos / log_base) * (num_buckets - max_exact)); + relative_position_if_large[i] = std::min(relative_position_if_large[i], num_buckets - 1); + relative_buckets[i] += relative_position_if_large[i]; + } + } + + return relative_buckets; + } + + std::vector compute_relative_position_bucket(int query_length, + int key_length) { + std::vector context_position(query_length); + std::vector memory_position(key_length); + + for (int i = 0; i < query_length; ++i) { + context_position[i] = i; + } + for (int i = 0; i < key_length; ++i) { + memory_position[i] = i; + } + + std::vector> relative_position(query_length, std::vector(key_length, 0)); + for (int i = 0; i < query_length; ++i) { + for (int j = 0; j < key_length; ++j) { + relative_position[i][j] = memory_position[j] - context_position[i]; + } + } + + 
std::vector relative_position_bucket; + for (int i = 0; i < query_length; ++i) { + std::vector result = _relative_position_bucket(relative_position[i], true); + relative_position_bucket.insert(relative_position_bucket.end(), result.begin(), result.end()); + } + + return relative_position_bucket; + } +}; + +struct T5Embedder { + T5UniGramTokenizer tokenizer; + T5Runner model; + + static std::map empty_tensor_types; + + T5Embedder(ggml_backend_t backend, + std::map& tensor_types = empty_tensor_types, + const std::string prefix = "", + int64_t num_layers = 24, + int64_t model_dim = 4096, + int64_t ff_dim = 10240, + int64_t num_heads = 64, + int64_t vocab_size = 32128) + : model(backend, tensor_types, prefix, num_layers, model_dim, ff_dim, num_heads, vocab_size) { + } + + void get_param_tensors(std::map& tensors, const std::string prefix) { + model.get_param_tensors(tensors, prefix); + } + + void alloc_params_buffer() { + model.alloc_params_buffer(); + } + + std::tuple, std::vector, std::vector> tokenize(std::string text, + size_t max_length = 0, + bool padding = false) { + auto parsed_attention = parse_prompt_attention(text); + + { + std::stringstream ss; + ss << "["; + for (const auto& item : parsed_attention) { + ss << "['" << item.first << "', " << item.second << "], "; + } + ss << "]"; + LOG_DEBUG("parse '%s' to %s", text.c_str(), ss.str().c_str()); + } + + std::vector tokens; + std::vector weights; + for (const auto& item : parsed_attention) { + const std::string& curr_text = item.first; + float curr_weight = item.second; + std::vector curr_tokens = tokenizer.Encode(curr_text, false); + tokens.insert(tokens.end(), curr_tokens.begin(), curr_tokens.end()); + weights.insert(weights.end(), curr_tokens.size(), curr_weight); + } + + int EOS_TOKEN_ID = 1; + tokens.push_back(EOS_TOKEN_ID); + weights.push_back(1.0); + + std::vector attention_mask; + + tokenizer.pad_tokens(tokens, weights, &attention_mask, max_length, padding); + + // for (int i = 0; i < tokens.size(); 
i++) { + // std::cout << tokens[i] << ":" << weights[i] << ", "; + // } + // std::cout << std::endl; + + return {tokens, weights, attention_mask}; + } + + void test() { + struct ggml_init_params params; + params.mem_size = static_cast(10 * 1024 * 1024); // 10 MB + params.mem_buffer = NULL; + params.no_alloc = false; + + struct ggml_context* work_ctx = ggml_init(params); + GGML_ASSERT(work_ctx != NULL); + + { + // cpu f16: pass + // cpu f32: pass + // cuda f16: nan + // cuda f32: pass + // cuda q8_0: nan + // TODO: fix cuda nan + std::string text("a lovely cat"); + auto tokens_and_weights = tokenize(text, 77, true); + std::vector& tokens = std::get<0>(tokens_and_weights); + std::vector& weights = std::get<1>(tokens_and_weights); + for (auto token : tokens) { + printf("%d ", token); + } + printf("\n"); + auto input_ids = vector_to_ggml_tensor_i32(work_ctx, tokens); + struct ggml_tensor* out = NULL; + + int t0 = ggml_time_ms(); + model.compute(8, input_ids, NULL, &out, work_ctx); + int t1 = ggml_time_ms(); + + print_ggml_tensor(out); + LOG_DEBUG("t5 test done in %dms", t1 - t0); + } + } + + static void load_from_file_and_test(const std::string& file_path) { + // ggml_backend_t backend = ggml_backend_cuda_init(0); + ggml_backend_t backend = ggml_backend_cpu_init(); + ggml_type model_data_type = GGML_TYPE_F32; + std::shared_ptr t5 = std::shared_ptr(new T5Embedder(backend)); + { + LOG_INFO("loading from '%s'", file_path.c_str()); + + t5->alloc_params_buffer(); + std::map tensors; + t5->get_param_tensors(tensors, ""); + + ModelLoader model_loader; + if (!model_loader.init_from_file(file_path)) { + LOG_ERROR("init model loader from file failed: '%s'", file_path.c_str()); + return; + } + + bool success = model_loader.load_tensors(tensors, backend); + + if (!success) { + LOG_ERROR("load tensors from model loader failed"); + return; + } + + LOG_INFO("t5 model loaded"); + } + t5->test(); + } +}; + +#endif // __T5_HPP__ \ No newline at end of file diff --git a/tae.hpp b/tae.hpp 
new file mode 100644 index 000000000..678c44c57 --- /dev/null +++ b/tae.hpp @@ -0,0 +1,261 @@ +#ifndef __TAE_HPP__ +#define __TAE_HPP__ + +#include "ggml_extend.hpp" + +#include "model.h" + +/* + =================================== TinyAutoEncoder =================================== + References: + https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/autoencoders/vae.py + https://github.com/madebyollin/taesd/blob/main/taesd.py + +*/ + +class TAEBlock : public UnaryBlock { +protected: + int n_in; + int n_out; + +public: + TAEBlock(int n_in, int n_out) + : n_in(n_in), n_out(n_out) { + blocks["conv.0"] = std::shared_ptr(new Conv2d(n_in, n_out, {3, 3}, {1, 1}, {1, 1})); + blocks["conv.2"] = std::shared_ptr(new Conv2d(n_out, n_out, {3, 3}, {1, 1}, {1, 1})); + blocks["conv.4"] = std::shared_ptr(new Conv2d(n_out, n_out, {3, 3}, {1, 1}, {1, 1})); + if (n_in != n_out) { + blocks["skip"] = std::shared_ptr(new Conv2d(n_in, n_out, {1, 1}, {1, 1}, {1, 1}, {1, 1}, false)); + } + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [n, n_in, h, w] + // return: [n, n_out, h, w] + + auto conv_0 = std::dynamic_pointer_cast(blocks["conv.0"]); + auto conv_2 = std::dynamic_pointer_cast(blocks["conv.2"]); + auto conv_4 = std::dynamic_pointer_cast(blocks["conv.4"]); + + auto h = conv_0->forward(ctx, x); + h = ggml_relu_inplace(ctx, h); + h = conv_2->forward(ctx, h); + h = ggml_relu_inplace(ctx, h); + h = conv_4->forward(ctx, h); + + if (n_in != n_out) { + auto skip = std::dynamic_pointer_cast(blocks["skip"]); + LOG_DEBUG("skip"); + x = skip->forward(ctx, x); + } + + h = ggml_add(ctx, h, x); + h = ggml_relu_inplace(ctx, h); + return h; + } +}; + +class TinyEncoder : public UnaryBlock { + int in_channels = 3; + int channels = 64; + int z_channels = 4; + int num_blocks = 3; + +public: + TinyEncoder(int z_channels = 4) + : z_channels(z_channels) { + int index = 0; + blocks[std::to_string(index++)] = std::shared_ptr(new 
Conv2d(in_channels, channels, {3, 3}, {1, 1}, {1, 1})); + blocks[std::to_string(index++)] = std::shared_ptr(new TAEBlock(channels, channels)); + + blocks[std::to_string(index++)] = std::shared_ptr(new Conv2d(channels, channels, {3, 3}, {2, 2}, {1, 1}, {1, 1}, false)); + for (int i = 0; i < num_blocks; i++) { + blocks[std::to_string(index++)] = std::shared_ptr(new TAEBlock(channels, channels)); + } + + blocks[std::to_string(index++)] = std::shared_ptr(new Conv2d(channels, channels, {3, 3}, {2, 2}, {1, 1}, {1, 1}, false)); + for (int i = 0; i < num_blocks; i++) { + blocks[std::to_string(index++)] = std::shared_ptr(new TAEBlock(channels, channels)); + } + + blocks[std::to_string(index++)] = std::shared_ptr(new Conv2d(channels, channels, {3, 3}, {2, 2}, {1, 1}, {1, 1}, false)); + for (int i = 0; i < num_blocks; i++) { + blocks[std::to_string(index++)] = std::shared_ptr(new TAEBlock(channels, channels)); + } + + blocks[std::to_string(index++)] = std::shared_ptr(new Conv2d(channels, z_channels, {3, 3}, {1, 1}, {1, 1})); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* x) { + // x: [n, in_channels, h, w] + // return: [n, z_channels, h/8, w/8] + + for (int i = 0; i < num_blocks * 3 + 6; i++) { + auto block = std::dynamic_pointer_cast(blocks[std::to_string(i)]); + + x = block->forward(ctx, x); + } + + return x; + } +}; + +class TinyDecoder : public UnaryBlock { + int z_channels = 4; + int channels = 64; + int out_channels = 3; + int num_blocks = 3; + +public: + TinyDecoder(int z_channels = 4) + : z_channels(z_channels) { + int index = 0; + + blocks[std::to_string(index++)] = std::shared_ptr(new Conv2d(z_channels, channels, {3, 3}, {1, 1}, {1, 1})); + index++; // nn.ReLU() + + for (int i = 0; i < num_blocks; i++) { + blocks[std::to_string(index++)] = std::shared_ptr(new TAEBlock(channels, channels)); + } + index++; // nn.Upsample() + blocks[std::to_string(index++)] = std::shared_ptr(new Conv2d(channels, channels, {3, 3}, {1, 1}, {1, 1}, {1, 
1}, false)); + + for (int i = 0; i < num_blocks; i++) { + blocks[std::to_string(index++)] = std::shared_ptr(new TAEBlock(channels, channels)); + } + index++; // nn.Upsample() + blocks[std::to_string(index++)] = std::shared_ptr(new Conv2d(channels, channels, {3, 3}, {1, 1}, {1, 1}, {1, 1}, false)); + + for (int i = 0; i < num_blocks; i++) { + blocks[std::to_string(index++)] = std::shared_ptr(new TAEBlock(channels, channels)); + } + index++; // nn.Upsample() + blocks[std::to_string(index++)] = std::shared_ptr(new Conv2d(channels, channels, {3, 3}, {1, 1}, {1, 1}, {1, 1}, false)); + + blocks[std::to_string(index++)] = std::shared_ptr(new TAEBlock(channels, channels)); + blocks[std::to_string(index++)] = std::shared_ptr(new Conv2d(channels, out_channels, {3, 3}, {1, 1}, {1, 1})); + } + + struct ggml_tensor* forward(struct ggml_context* ctx, struct ggml_tensor* z) { + // z: [n, z_channels, h, w] + // return: [n, out_channels, h*8, w*8] + + auto h = ggml_scale(ctx, z, 1.0f / 3.0f); + h = ggml_tanh_inplace(ctx, h); + h = ggml_scale(ctx, h, 3.0f); + + for (int i = 0; i < num_blocks * 3 + 10; i++) { + if (blocks.find(std::to_string(i)) == blocks.end()) { + if (i == 1) { + h = ggml_relu_inplace(ctx, h); + } else { + h = ggml_upscale(ctx, h, 2, GGML_SCALE_MODE_NEAREST); + } + continue; + } + auto block = std::dynamic_pointer_cast(blocks[std::to_string(i)]); + + h = block->forward(ctx, h); + } + + return h; + } +}; + +class TAESD : public GGMLBlock { +protected: + bool decode_only; + +public: + TAESD(bool decode_only = true, SDVersion version = VERSION_SD1) + : decode_only(decode_only) { + int z_channels = 4; + if (sd_version_is_dit(version)) { + z_channels = 16; + } + blocks["decoder.layers"] = std::shared_ptr(new TinyDecoder(z_channels)); + + if (!decode_only) { + blocks["encoder.layers"] = std::shared_ptr(new TinyEncoder(z_channels)); + } + } + + struct ggml_tensor* decode(struct ggml_context* ctx, struct ggml_tensor* z) { + auto decoder = 
std::dynamic_pointer_cast(blocks["decoder.layers"]); + return decoder->forward(ctx, z); + } + + struct ggml_tensor* encode(struct ggml_context* ctx, struct ggml_tensor* x) { + auto encoder = std::dynamic_pointer_cast(blocks["encoder.layers"]); + return encoder->forward(ctx, x); + } +}; + +struct TinyAutoEncoder : public GGMLRunner { + TAESD taesd; + bool decode_only = false; + + TinyAutoEncoder(ggml_backend_t backend, + std::map& tensor_types, + const std::string prefix, + bool decoder_only = true, + SDVersion version = VERSION_SD1) + : decode_only(decoder_only), + taesd(decoder_only, version), + GGMLRunner(backend) { + taesd.init(params_ctx, tensor_types, prefix); + } + + std::string get_desc() { + return "taesd"; + } + + bool load_from_file(const std::string& file_path) { + LOG_INFO("loading taesd from '%s', decode_only = %s", file_path.c_str(), decode_only ? "true" : "false"); + alloc_params_buffer(); + std::map taesd_tensors; + taesd.get_param_tensors(taesd_tensors); + std::set ignore_tensors; + if (decode_only) { + ignore_tensors.insert("encoder."); + } + + ModelLoader model_loader; + if (!model_loader.init_from_file(file_path)) { + LOG_ERROR("init taesd model loader from file failed: '%s'", file_path.c_str()); + return false; + } + + bool success = model_loader.load_tensors(taesd_tensors, backend, ignore_tensors); + + if (!success) { + LOG_ERROR("load tae tensors from model loader failed"); + return false; + } + + LOG_INFO("taesd model loaded"); + return success; + } + + struct ggml_cgraph* build_graph(struct ggml_tensor* z, bool decode_graph) { + struct ggml_cgraph* gf = ggml_new_graph(compute_ctx); + z = to_backend(z); + struct ggml_tensor* out = decode_graph ? 
taesd.decode(compute_ctx, z) : taesd.encode(compute_ctx, z); + ggml_build_forward_expand(gf, out); + return gf; + } + + void compute(const int n_threads, + struct ggml_tensor* z, + bool decode_graph, + struct ggml_tensor** output, + struct ggml_context* output_ctx = NULL) { + auto get_graph = [&]() -> struct ggml_cgraph* { + return build_graph(z, decode_graph); + }; + + GGMLRunner::compute(get_graph, n_threads, false, output, output_ctx); + } +}; + +#endif // __TAE_HPP__ \ No newline at end of file diff --git a/thirdparty/.clang-format b/thirdparty/.clang-format new file mode 100644 index 000000000..47a38a93f --- /dev/null +++ b/thirdparty/.clang-format @@ -0,0 +1,2 @@ +DisableFormat: true +SortIncludes: Never diff --git a/thirdparty/CMakeLists.txt b/thirdparty/CMakeLists.txt new file mode 100644 index 000000000..77274c336 --- /dev/null +++ b/thirdparty/CMakeLists.txt @@ -0,0 +1,3 @@ +set(Z_TARGET zip) +add_library(${Z_TARGET} OBJECT zip.c zip.h miniz.h) +target_include_directories(${Z_TARGET} PUBLIC .) \ No newline at end of file diff --git a/thirdparty/LICENSE.darts_clone.txt b/thirdparty/LICENSE.darts_clone.txt new file mode 100644 index 000000000..28203b3b2 --- /dev/null +++ b/thirdparty/LICENSE.darts_clone.txt @@ -0,0 +1,10 @@ +Copyright (c) 2008-2011, Susumu Yata +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +- Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. +- Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. +- Neither the name of the nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/thirdparty/README.md b/thirdparty/README.md new file mode 100644 index 000000000..518dc18b1 --- /dev/null +++ b/thirdparty/README.md @@ -0,0 +1,3 @@ +- json.hpp library from: https://github.com/nlohmann/json +- ZIP Library from: https://github.com/kuba--/zip +- darts.h from: https://github.com/google/sentencepiece/tree/master/third_party/darts_clone \ No newline at end of file diff --git a/thirdparty/darts.h b/thirdparty/darts.h new file mode 100644 index 000000000..8892aaaa0 --- /dev/null +++ b/thirdparty/darts.h @@ -0,0 +1,1926 @@ +#ifndef DARTS_H_ +#define DARTS_H_ + +#include +#include +#include + +#define DARTS_VERSION "0.32" + +// DARTS_THROW() throws a whose message starts with the +// file name and the line number. For example, DARTS_THROW("error message") at +// line 123 of "darts.h" throws a which has a pointer to +// "darts.h:123: exception: error message". The message is available by using +// what() as well as that of . 
+#define DARTS_INT_TO_STR(value) #value +#define DARTS_LINE_TO_STR(line) DARTS_INT_TO_STR(line) +#define DARTS_LINE_STR DARTS_LINE_TO_STR(__LINE__) +#define DARTS_THROW(msg) throw Darts::Details::Exception( \ + __FILE__ ":" DARTS_LINE_STR ": exception: " msg) + +namespace Darts { + +// The following namespace hides the internal types and classes. +namespace Details { + +// This header assumes that and are 32-bit integer types. +// +// Darts-clone keeps values associated with keys. The type of the values is +// . Note that the values must be positive integers because the +// most significant bit (MSB) of each value is used to represent whether the +// corresponding unit is a leaf or not. Also, the keys are represented by +// sequences of s. is the unsigned type of . +typedef char char_type; +typedef unsigned char uchar_type; +typedef int value_type; + +// The main structure of Darts-clone is an array of s, and the +// unit type is actually a wrapper of . +typedef unsigned int id_type; + +// is the type of callback functions for reporting the +// progress of building a dictionary. See also build() of . +// The 1st argument receives the progress value and the 2nd argument receives +// the maximum progress value. A usage example is to show the progress +// percentage, 100.0 * (the 1st argument) / (the 2nd argument). +typedef int (*progress_func_type)(std::size_t, std::size_t); + +// is the type of double-array units and it is a wrapper of +// in practice. +class DoubleArrayUnit { + public: + DoubleArrayUnit() : unit_() {} + + // has_leaf() returns whether a leaf unit is immediately derived from the + // unit (true) or not (false). + bool has_leaf() const { + return ((unit_ >> 8) & 1) == 1; + } + // value() returns the value stored in the unit, and thus value() is + // available when and only when the unit is a leaf unit. + value_type value() const { + return static_cast(unit_ & ((1U << 31) - 1)); + } + + // label() returns the label associted with the unit. 
Note that a leaf unit + // always returns an invalid label. For this feature, leaf unit's label() + // returns an that has the MSB of 1. + id_type label() const { + return unit_ & ((1U << 31) | 0xFF); + } + // offset() returns the offset from the unit to its derived units. + id_type offset() const { + return (unit_ >> 10) << ((unit_ & (1U << 9)) >> 6); + } + + private: + id_type unit_; + + // Copyable. +}; + +// Darts-clone throws an for memory allocation failure, invalid +// arguments or a too large offset. The last case means that there are too many +// keys in the given set of keys. Note that the `msg' of must be a +// constant or static string because an keeps only a pointer to +// that string. +class Exception : public std::exception { + public: + explicit Exception(const char *msg = NULL) throw() : msg_(msg) {} + Exception(const Exception &rhs) throw() : msg_(rhs.msg_) {} + virtual ~Exception() throw() {} + + // overrides what() of . + virtual const char *what() const throw() { + return (msg_ != NULL) ? msg_ : ""; + } + + private: + const char *msg_; + + // Disallows operator=. + Exception &operator=(const Exception &); +}; + +} // namespace Details + +// is the interface of Darts-clone. Note that other +// classes should not be accessed from outside. +// +// has 4 template arguments but only the 3rd one is used as +// the type of values. Note that the given is used only from outside, and +// the internal value type is not changed from . +// In build(), given values are casted from to +// by using static_cast. On the other hand, values are casted from +// to in searching dictionaries. +template +class DoubleArrayImpl { + public: + // Even if this is changed, the internal value type is still + // . Other types, such as 64-bit integer types + // and floating-point number types, should not be used. + typedef T value_type; + // A key is reprenseted by a sequence of s. For example, + // exactMatchSearch() takes a . 
+ typedef Details::char_type key_type; + // In searching dictionaries, the values associated with the matched keys are + // stored into or returned as s. + typedef value_type result_type; + + // enables applications to get the lengths of the matched + // keys in addition to the values. + struct result_pair_type { + value_type value; + std::size_t length; + }; + + // The constructor initializes member variables with 0 and NULLs. + DoubleArrayImpl() : size_(0), array_(NULL), buf_(NULL) {} + // The destructor frees memory allocated for units and then initializes + // member variables with 0 and NULLs. + virtual ~DoubleArrayImpl() { + clear(); + } + + // has 2 kinds of set_result()s. The 1st set_result() is to + // set a value to a . The 2nd set_result() is to set a value and + // a length to a . By using set_result()s, search methods + // can return the 2 kinds of results in the same way. + // Why the set_result()s are non-static? It is for compatibility. + // + // The 1st set_result() takes a length as the 3rd argument but it is not + // used. If a compiler does a good job, codes for getting the length may be + // removed. + void set_result(value_type *result, value_type value, std::size_t) const { + *result = value; + } + // The 2nd set_result() uses both `value' and `length'. + void set_result(result_pair_type *result, + value_type value, std::size_t length) const { + result->value = value; + result->length = length; + } + + // set_array() calls clear() in order to free memory allocated to the old + // array and then sets a new array. This function is useful to set a memory- + // mapped array. Note that the array set by set_array() is not freed in + // clear() and the destructor of . + // set_array() can also set the size of the new array but the size is not + // used in search methods. So it works well even if the 2nd argument is 0 or + // omitted. Remember that size() and total_size() returns 0 in such a case. 
+ void set_array(const void *ptr, std::size_t size = 0) { + clear(); + array_ = static_cast(ptr); + size_ = size; + } + // array() returns a pointer to the array of units. + const void *array() const { + return array_; + } + + // clear() frees memory allocated to units and then initializes member + // variables with 0 and NULLs. Note that clear() does not free memory if the + // array of units was set by set_array(). In such a case, `array_' is not + // NULL and `buf_' is NULL. + void clear() { + size_ = 0; + array_ = NULL; + if (buf_ != NULL) { + delete[] buf_; + buf_ = NULL; + } + } + + // unit_size() returns the size of each unit. The size must be 4 bytes. + std::size_t unit_size() const { + return sizeof(unit_type); + } + // size() returns the number of units. It can be 0 if set_array() is used. + std::size_t size() const { + return size_; + } + // total_size() returns the number of bytes allocated to the array of units. + // It can be 0 if set_array() is used. + std::size_t total_size() const { + return unit_size() * size(); + } + // nonzero_size() exists for compatibility. It always returns the number of + // units because it takes long time to count the number of non-zero units. + std::size_t nonzero_size() const { + return size(); + } + + // build() constructs a dictionary from given key-value pairs. If `lengths' + // is NULL, `keys' is handled as an array of zero-terminated strings. If + // `values' is NULL, the index in `keys' is associated with each key, i.e. + // the ith key has (i - 1) as its value. + // Note that the key-value pairs must be arranged in key order and the values + // must not be negative. Also, if there are duplicate keys, only the first + // pair will be stored in the resultant dictionary. + // `progress_func' is a pointer to a callback function. If it is not NULL, + // it will be called in build() so that the caller can check the progress of + // dictionary construction. For details, please see the definition of + // . 
+ // The return value of build() is 0, and it indicates the success of the + // operation. Otherwise, build() throws a , which is a + // derived class of . + // build() uses another construction algorithm if `values' is not NULL. In + // this case, Darts-clone uses a Directed Acyclic Word Graph (DAWG) instead + // of a trie because a DAWG is likely to be more compact than a trie. + int build(std::size_t num_keys, const key_type * const *keys, + const std::size_t *lengths = NULL, const value_type *values = NULL, + Details::progress_func_type progress_func = NULL); + + // open() reads an array of units from the specified file. And if it goes + // well, the old array will be freed and replaced with the new array read + // from the file. `offset' specifies the number of bytes to be skipped before + // reading an array. `size' specifies the number of bytes to be read from the + // file. If the `size' is 0, the whole file will be read. + // open() returns 0 iff the operation succeeds. Otherwise, it returns a + // non-zero value or throws a . The exception is thrown + // when and only when a memory allocation fails. + int open(const char *file_name, const char *mode = "rb", + std::size_t offset = 0, std::size_t size = 0); + // save() writes the array of units into the specified file. `offset' + // specifies the number of bytes to be skipped before writing the array. + // open() returns 0 iff the operation succeeds. Otherwise, it returns a + // non-zero value. + int save(const char *file_name, const char *mode = "wb", + std::size_t offset = 0) const; + + // The 1st exactMatchSearch() tests whether the given key exists or not, and + // if it exists, its value and length are set to `result'. Otherwise, the + // value and the length of `result' are set to -1 and 0 respectively. + // Note that if `length' is 0, `key' is handled as a zero-terminated string. + // `node_pos' specifies the start position of matching. 
This argument enables + // the combination of exactMatchSearch() and traverse(). For example, if you + // want to test "xyzA", "xyzBC", and "xyzDE", you can use traverse() to get + // the node position corresponding to "xyz" and then you can use + // exactMatchSearch() to test "A", "BC", and "DE" from that position. + // Note that the length of `result' indicates the length from the `node_pos'. + // In the above example, the lengths are { 1, 2, 2 }, not { 4, 5, 5 }. + template + void exactMatchSearch(const key_type *key, U &result, + std::size_t length = 0, std::size_t node_pos = 0) const { + result = exactMatchSearch(key, length, node_pos); + } + // The 2nd exactMatchSearch() returns a result instead of updating the 2nd + // argument. So, the following exactMatchSearch() has only 3 arguments. + template + inline U exactMatchSearch(const key_type *key, std::size_t length = 0, + std::size_t node_pos = 0) const; + + // commonPrefixSearch() searches for keys which match a prefix of the given + // string. If `length' is 0, `key' is handled as a zero-terminated string. + // The values and the lengths of at most `max_num_results' matched keys are + // stored in `results'. commonPrefixSearch() returns the number of matched + // keys. Note that the return value can be larger than `max_num_results' if + // there are more than `max_num_results' matches. If you want to get all the + // results, allocate more spaces and call commonPrefixSearch() again. + // `node_pos' works as well as in exactMatchSearch(). + template + inline std::size_t commonPrefixSearch(const key_type *key, U *results, + std::size_t max_num_results, std::size_t length = 0, + std::size_t node_pos = 0) const; + + // In Darts-clone, a dictionary is a deterministic finite-state automaton + // (DFA) and traverse() tests transitions on the DFA. The initial state is + // `node_pos' and traverse() chooses transitions labeled key[key_pos], + // key[key_pos + 1], ... in order. 
If there is not a transition labeled + // key[key_pos + i], traverse() terminates the transitions at that state and + // returns -2. Otherwise, traverse() ends without a termination and returns + // -1 or a nonnegative value, -1 indicates that the final state was not an + // accept state. When a nonnegative value is returned, it is the value + // associated with the final accept state. That is, traverse() returns the + // value associated with the given key if it exists. Note that traverse() + // updates `node_pos' and `key_pos' after each transition. + inline value_type traverse(const key_type *key, std::size_t &node_pos, + std::size_t &key_pos, std::size_t length = 0) const; + + private: + typedef Details::uchar_type uchar_type; + typedef Details::id_type id_type; + typedef Details::DoubleArrayUnit unit_type; + + std::size_t size_; + const unit_type *array_; + unit_type *buf_; + + // Disallows copy and assignment. + DoubleArrayImpl(const DoubleArrayImpl &); + DoubleArrayImpl &operator=(const DoubleArrayImpl &); +}; + +// is the typical instance of . It uses +// as the type of values and it is suitable for most cases. +typedef DoubleArrayImpl DoubleArray; + +// The interface section ends here. For using Darts-clone, there is no need +// to read the remaining section, which gives the implementation of +// Darts-clone. + +// +// Member functions of DoubleArrayImpl (except build()). 
+// + +template +int DoubleArrayImpl::open(const char *file_name, + const char *mode, std::size_t offset, std::size_t size) { +#ifdef _MSC_VER + std::FILE *file; + if (::fopen_s(&file, file_name, mode) != 0) { + return -1; + } +#else + std::FILE *file = std::fopen(file_name, mode); + if (file == NULL) { + return -1; + } +#endif + + if (size == 0) { + if (std::fseek(file, 0, SEEK_END) != 0) { + std::fclose(file); + return -1; + } + size = std::ftell(file) - offset; + } + + size /= unit_size(); + if (size < 256 || (size & 0xFF) != 0) { + std::fclose(file); + return -1; + } + + if (std::fseek(file, offset, SEEK_SET) != 0) { + std::fclose(file); + return -1; + } + + unit_type units[256]; + if (std::fread(units, unit_size(), 256, file) != 256) { + std::fclose(file); + return -1; + } + + if (units[0].label() != '\0' || units[0].has_leaf() || + units[0].offset() == 0 || units[0].offset() >= 512) { + std::fclose(file); + return -1; + } + for (id_type i = 1; i < 256; ++i) { + if (units[i].label() <= 0xFF && units[i].offset() >= size) { + std::fclose(file); + return -1; + } + } + + unit_type *buf; + try { + buf = new unit_type[size]; + for (id_type i = 0; i < 256; ++i) { + buf[i] = units[i]; + } + } catch (const std::bad_alloc &) { + std::fclose(file); + DARTS_THROW("failed to open double-array: std::bad_alloc"); + } + + if (size > 256) { + if (std::fread(buf + 256, unit_size(), size - 256, file) != size - 256) { + std::fclose(file); + delete[] buf; + return -1; + } + } + std::fclose(file); + + clear(); + + size_ = size; + array_ = buf; + buf_ = buf; + return 0; +} + +template +int DoubleArrayImpl::save(const char *file_name, + const char *mode, std::size_t) const { + if (size() == 0) { + return -1; + } + +#ifdef _MSC_VER + std::FILE *file; + if (::fopen_s(&file, file_name, mode) != 0) { + return -1; + } +#else + std::FILE *file = std::fopen(file_name, mode); + if (file == NULL) { + return -1; + } +#endif + + if (std::fwrite(array_, unit_size(), size(), file) != size()) { + 
std::fclose(file); + return -1; + } + std::fclose(file); + return 0; +} + +template +template +inline U DoubleArrayImpl::exactMatchSearch(const key_type *key, + std::size_t length, std::size_t node_pos) const { + U result; + set_result(&result, static_cast(-1), 0); + + unit_type unit = array_[node_pos]; + if (length != 0) { + for (std::size_t i = 0; i < length; ++i) { + node_pos ^= unit.offset() ^ static_cast(key[i]); + unit = array_[node_pos]; + if (unit.label() != static_cast(key[i])) { + return result; + } + } + } else { + for ( ; key[length] != '\0'; ++length) { + node_pos ^= unit.offset() ^ static_cast(key[length]); + unit = array_[node_pos]; + if (unit.label() != static_cast(key[length])) { + return result; + } + } + } + + if (!unit.has_leaf()) { + return result; + } + unit = array_[node_pos ^ unit.offset()]; + set_result(&result, static_cast(unit.value()), length); + return result; +} + +template +template +inline std::size_t DoubleArrayImpl::commonPrefixSearch( + const key_type *key, U *results, std::size_t max_num_results, + std::size_t length, std::size_t node_pos) const { + std::size_t num_results = 0; + + unit_type unit = array_[node_pos]; + node_pos ^= unit.offset(); + if (length != 0) { + for (std::size_t i = 0; i < length; ++i) { + node_pos ^= static_cast(key[i]); + unit = array_[node_pos]; + if (unit.label() != static_cast(key[i])) { + return num_results; + } + + node_pos ^= unit.offset(); + if (unit.has_leaf()) { + if (num_results < max_num_results) { + set_result(&results[num_results], static_cast( + array_[node_pos].value()), i + 1); + } + ++num_results; + } + } + } else { + for ( ; key[length] != '\0'; ++length) { + node_pos ^= static_cast(key[length]); + unit = array_[node_pos]; + if (unit.label() != static_cast(key[length])) { + return num_results; + } + + node_pos ^= unit.offset(); + if (unit.has_leaf()) { + if (num_results < max_num_results) { + set_result(&results[num_results], static_cast( + array_[node_pos].value()), length + 1); + } + 
++num_results; + } + } + } + + return num_results; +} + +template +inline typename DoubleArrayImpl::value_type +DoubleArrayImpl::traverse(const key_type *key, + std::size_t &node_pos, std::size_t &key_pos, std::size_t length) const { + id_type id = static_cast(node_pos); + unit_type unit = array_[id]; + + if (length != 0) { + for ( ; key_pos < length; ++key_pos) { + id ^= unit.offset() ^ static_cast(key[key_pos]); + unit = array_[id]; + if (unit.label() != static_cast(key[key_pos])) { + return static_cast(-2); + } + node_pos = id; + } + } else { + for ( ; key[key_pos] != '\0'; ++key_pos) { + id ^= unit.offset() ^ static_cast(key[key_pos]); + unit = array_[id]; + if (unit.label() != static_cast(key[key_pos])) { + return static_cast(-2); + } + node_pos = id; + } + } + + if (!unit.has_leaf()) { + return static_cast(-1); + } + unit = array_[id ^ unit.offset()]; + return static_cast(unit.value()); +} + +namespace Details { + +// +// Memory management of array. +// + +template +class AutoArray { + public: + explicit AutoArray(T *array = NULL) : array_(array) {} + ~AutoArray() { + clear(); + } + + const T &operator[](std::size_t id) const { + return array_[id]; + } + T &operator[](std::size_t id) { + return array_[id]; + } + + bool empty() const { + return array_ == NULL; + } + + void clear() { + if (array_ != NULL) { + delete[] array_; + array_ = NULL; + } + } + void swap(AutoArray *array) { + T *temp = array_; + array_ = array->array_; + array->array_ = temp; + } + void reset(T *array = NULL) { + AutoArray(array).swap(this); + } + + private: + T *array_; + + // Disallows copy and assignment. + AutoArray(const AutoArray &); + AutoArray &operator=(const AutoArray &); +}; + +// +// Memory management of resizable array. 
+// + +template +class AutoPool { + public: + AutoPool() : buf_(), size_(0), capacity_(0) {} + ~AutoPool() { clear(); } + + const T &operator[](std::size_t id) const { + return *(reinterpret_cast(&buf_[0]) + id); + } + T &operator[](std::size_t id) { + return *(reinterpret_cast(&buf_[0]) + id); + } + + bool empty() const { + return size_ == 0; + } + std::size_t size() const { + return size_; + } + + void clear() { + resize(0); + buf_.clear(); + size_ = 0; + capacity_ = 0; + } + + void push_back(const T &value) { + append(value); + } + void pop_back() { + (*this)[--size_].~T(); + } + + void append() { + if (size_ == capacity_) + resize_buf(size_ + 1); + new(&(*this)[size_++]) T; + } + void append(const T &value) { + if (size_ == capacity_) + resize_buf(size_ + 1); + new(&(*this)[size_++]) T(value); + } + + void resize(std::size_t size) { + while (size_ > size) { + (*this)[--size_].~T(); + } + if (size > capacity_) { + resize_buf(size); + } + while (size_ < size) { + new(&(*this)[size_++]) T; + } + } + void resize(std::size_t size, const T &value) { + while (size_ > size) { + (*this)[--size_].~T(); + } + if (size > capacity_) { + resize_buf(size); + } + while (size_ < size) { + new(&(*this)[size_++]) T(value); + } + } + + void reserve(std::size_t size) { + if (size > capacity_) { + resize_buf(size); + } + } + + private: + AutoArray buf_; + std::size_t size_; + std::size_t capacity_; + + // Disallows copy and assignment. 
+ AutoPool(const AutoPool &); + AutoPool &operator=(const AutoPool &); + + void resize_buf(std::size_t size); +}; + +template +void AutoPool::resize_buf(std::size_t size) { + std::size_t capacity; + if (size >= capacity_ * 2) { + capacity = size; + } else { + capacity = 1; + while (capacity < size) { + capacity <<= 1; + } + } + + AutoArray buf; + try { + buf.reset(new char[sizeof(T) * capacity]); + } catch (const std::bad_alloc &) { + DARTS_THROW("failed to resize pool: std::bad_alloc"); + } + + if (size_ > 0) { + T *src = reinterpret_cast(&buf_[0]); + T *dest = reinterpret_cast(&buf[0]); + for (std::size_t i = 0; i < size_; ++i) { + new(&dest[i]) T(src[i]); + src[i].~T(); + } + } + + buf_.swap(&buf); + capacity_ = capacity; +} + +// +// Memory management of stack. +// + +template +class AutoStack { + public: + AutoStack() : pool_() {} + ~AutoStack() { + clear(); + } + + const T &top() const { + return pool_[size() - 1]; + } + T &top() { + return pool_[size() - 1]; + } + + bool empty() const { + return pool_.empty(); + } + std::size_t size() const { + return pool_.size(); + } + + void push(const T &value) { + pool_.push_back(value); + } + void pop() { + pool_.pop_back(); + } + + void clear() { + pool_.clear(); + } + + private: + AutoPool pool_; + + // Disallows copy and assignment. + AutoStack(const AutoStack &); + AutoStack &operator=(const AutoStack &); +}; + +// +// Succinct bit vector. 
+// + +class BitVector { + public: + BitVector() : units_(), ranks_(), num_ones_(0), size_(0) {} + ~BitVector() { + clear(); + } + + bool operator[](std::size_t id) const { + return (units_[id / UNIT_SIZE] >> (id % UNIT_SIZE) & 1) == 1; + } + + id_type rank(std::size_t id) const { + std::size_t unit_id = id / UNIT_SIZE; + return ranks_[unit_id] + pop_count(units_[unit_id] + & (~0U >> (UNIT_SIZE - (id % UNIT_SIZE) - 1))); + } + + void set(std::size_t id, bool bit) { + if (bit) { + units_[id / UNIT_SIZE] |= 1U << (id % UNIT_SIZE); + } else { + units_[id / UNIT_SIZE] &= ~(1U << (id % UNIT_SIZE)); + } + } + + bool empty() const { + return units_.empty(); + } + std::size_t num_ones() const { + return num_ones_; + } + std::size_t size() const { + return size_; + } + + void append() { + if ((size_ % UNIT_SIZE) == 0) { + units_.append(0); + } + ++size_; + } + void build(); + + void clear() { + units_.clear(); + ranks_.clear(); + } + + private: + enum { UNIT_SIZE = sizeof(id_type) * 8 }; + + AutoPool units_; + AutoArray ranks_; + std::size_t num_ones_; + std::size_t size_; + + // Disallows copy and assignment. + BitVector(const BitVector &); + BitVector &operator=(const BitVector &); + + static id_type pop_count(id_type unit) { + unit = ((unit & 0xAAAAAAAA) >> 1) + (unit & 0x55555555); + unit = ((unit & 0xCCCCCCCC) >> 2) + (unit & 0x33333333); + unit = ((unit >> 4) + unit) & 0x0F0F0F0F; + unit += unit >> 8; + unit += unit >> 16; + return unit & 0xFF; + } +}; + +inline void BitVector::build() { + try { + ranks_.reset(new id_type[units_.size()]); + } catch (const std::bad_alloc &) { + DARTS_THROW("failed to build rank index: std::bad_alloc"); + } + + num_ones_ = 0; + for (std::size_t i = 0; i < units_.size(); ++i) { + ranks_[i] = num_ones_; + num_ones_ += pop_count(units_[i]); + } +} + +// +// Keyset. 
+// + +template +class Keyset { + public: + Keyset(std::size_t num_keys, const char_type * const *keys, + const std::size_t *lengths, const T *values) : + num_keys_(num_keys), keys_(keys), lengths_(lengths), values_(values) {} + + std::size_t num_keys() const { + return num_keys_; + } + const char_type *keys(std::size_t id) const { + return keys_[id]; + } + uchar_type keys(std::size_t key_id, std::size_t char_id) const { + if (has_lengths() && char_id >= lengths_[key_id]) + return '\0'; + return keys_[key_id][char_id]; + } + + bool has_lengths() const { + return lengths_ != NULL; + } + std::size_t lengths(std::size_t id) const { + if (has_lengths()) { + return lengths_[id]; + } + std::size_t length = 0; + while (keys_[id][length] != '\0') { + ++length; + } + return length; + } + + bool has_values() const { + return values_ != NULL; + } + const value_type values(std::size_t id) const { + if (has_values()) { + return static_cast(values_[id]); + } + return static_cast(id); + } + + private: + std::size_t num_keys_; + const char_type * const * keys_; + const std::size_t *lengths_; + const T *values_; + + // Disallows copy and assignment. + Keyset(const Keyset &); + Keyset &operator=(const Keyset &); +}; + +// +// Node of Directed Acyclic Word Graph (DAWG). 
+// + +class DawgNode { + public: + DawgNode() : child_(0), sibling_(0), label_('\0'), + is_state_(false), has_sibling_(false) {} + + void set_child(id_type child) { + child_ = child; + } + void set_sibling(id_type sibling) { + sibling_ = sibling; + } + void set_value(value_type value) { + child_ = value; + } + void set_label(uchar_type label) { + label_ = label; + } + void set_is_state(bool is_state) { + is_state_ = is_state; + } + void set_has_sibling(bool has_sibling) { + has_sibling_ = has_sibling; + } + + id_type child() const { + return child_; + } + id_type sibling() const { + return sibling_; + } + value_type value() const { + return static_cast(child_); + } + uchar_type label() const { + return label_; + } + bool is_state() const { + return is_state_; + } + bool has_sibling() const { + return has_sibling_; + } + + id_type unit() const { + if (label_ == '\0') { + return (child_ << 1) | (has_sibling_ ? 1 : 0); + } + return (child_ << 2) | (is_state_ ? 2 : 0) | (has_sibling_ ? 1 : 0); + } + + private: + id_type child_; + id_type sibling_; + uchar_type label_; + bool is_state_; + bool has_sibling_; + + // Copyable. +}; + +// +// Fixed unit of Directed Acyclic Word Graph (DAWG). +// + +class DawgUnit { + public: + explicit DawgUnit(id_type unit = 0) : unit_(unit) {} + DawgUnit(const DawgUnit &unit) : unit_(unit.unit_) {} + + DawgUnit &operator=(id_type unit) { + unit_ = unit; + return *this; + } + + id_type unit() const { + return unit_; + } + + id_type child() const { + return unit_ >> 2; + } + bool has_sibling() const { + return (unit_ & 1) == 1; + } + value_type value() const { + return static_cast(unit_ >> 1); + } + bool is_state() const { + return (unit_ & 2) == 2; + } + + private: + id_type unit_; + + // Copyable. +}; + +// +// Directed Acyclic Word Graph (DAWG) builder. 
+// + +class DawgBuilder { + public: + DawgBuilder() : nodes_(), units_(), labels_(), is_intersections_(), + table_(), node_stack_(), recycle_bin_(), num_states_(0) {} + ~DawgBuilder() { + clear(); + } + + id_type root() const { + return 0; + } + + id_type child(id_type id) const { + return units_[id].child(); + } + id_type sibling(id_type id) const { + return units_[id].has_sibling() ? (id + 1) : 0; + } + int value(id_type id) const { + return units_[id].value(); + } + + bool is_leaf(id_type id) const { + return label(id) == '\0'; + } + uchar_type label(id_type id) const { + return labels_[id]; + } + + bool is_intersection(id_type id) const { + return is_intersections_[id]; + } + id_type intersection_id(id_type id) const { + return is_intersections_.rank(id) - 1; + } + + std::size_t num_intersections() const { + return is_intersections_.num_ones(); + } + + std::size_t size() const { + return units_.size(); + } + + void init(); + void finish(); + + void insert(const char *key, std::size_t length, value_type value); + + void clear(); + + private: + enum { INITIAL_TABLE_SIZE = 1 << 10 }; + + AutoPool nodes_; + AutoPool units_; + AutoPool labels_; + BitVector is_intersections_; + AutoPool table_; + AutoStack node_stack_; + AutoStack recycle_bin_; + std::size_t num_states_; + + // Disallows copy and assignment. 
+ DawgBuilder(const DawgBuilder &); + DawgBuilder &operator=(const DawgBuilder &); + + void flush(id_type id); + + void expand_table(); + + id_type find_unit(id_type id, id_type *hash_id) const; + id_type find_node(id_type node_id, id_type *hash_id) const; + + bool are_equal(id_type node_id, id_type unit_id) const; + + id_type hash_unit(id_type id) const; + id_type hash_node(id_type id) const; + + id_type append_node(); + id_type append_unit(); + + void free_node(id_type id) { + recycle_bin_.push(id); + } + + static id_type hash(id_type key) { + key = ~key + (key << 15); // key = (key << 15) - key - 1; + key = key ^ (key >> 12); + key = key + (key << 2); + key = key ^ (key >> 4); + key = key * 2057; // key = (key + (key << 3)) + (key << 11); + key = key ^ (key >> 16); + return key; + } +}; + +inline void DawgBuilder::init() { + table_.resize(INITIAL_TABLE_SIZE, 0); + + append_node(); + append_unit(); + + num_states_ = 1; + + nodes_[0].set_label(0xFF); + node_stack_.push(0); +} + +inline void DawgBuilder::finish() { + flush(0); + + units_[0] = nodes_[0].unit(); + labels_[0] = nodes_[0].label(); + + nodes_.clear(); + table_.clear(); + node_stack_.clear(); + recycle_bin_.clear(); + + is_intersections_.build(); +} + +inline void DawgBuilder::insert(const char *key, std::size_t length, + value_type value) { + if (value < 0) { + DARTS_THROW("failed to insert key: negative value"); + } else if (length == 0) { + DARTS_THROW("failed to insert key: zero-length key"); + } + + id_type id = 0; + std::size_t key_pos = 0; + + for ( ; key_pos <= length; ++key_pos) { + id_type child_id = nodes_[id].child(); + if (child_id == 0) { + break; + } + + uchar_type key_label = static_cast(key[key_pos]); + if (key_pos < length && key_label == '\0') { + DARTS_THROW("failed to insert key: invalid null character"); + } + + uchar_type unit_label = nodes_[child_id].label(); + if (key_label < unit_label) { + DARTS_THROW("failed to insert key: wrong key order"); + } else if (key_label > 
unit_label) { + nodes_[child_id].set_has_sibling(true); + flush(child_id); + break; + } + id = child_id; + } + + if (key_pos > length) { + return; + } + + for ( ; key_pos <= length; ++key_pos) { + uchar_type key_label = static_cast( + (key_pos < length) ? key[key_pos] : '\0'); + id_type child_id = append_node(); + + if (nodes_[id].child() == 0) { + nodes_[child_id].set_is_state(true); + } + nodes_[child_id].set_sibling(nodes_[id].child()); + nodes_[child_id].set_label(key_label); + nodes_[id].set_child(child_id); + node_stack_.push(child_id); + + id = child_id; + } + nodes_[id].set_value(value); +} + +inline void DawgBuilder::clear() { + nodes_.clear(); + units_.clear(); + labels_.clear(); + is_intersections_.clear(); + table_.clear(); + node_stack_.clear(); + recycle_bin_.clear(); + num_states_ = 0; +} + +inline void DawgBuilder::flush(id_type id) { + while (node_stack_.top() != id) { + id_type node_id = node_stack_.top(); + node_stack_.pop(); + + if (num_states_ >= table_.size() - (table_.size() >> 2)) { + expand_table(); + } + + id_type num_siblings = 0; + for (id_type i = node_id; i != 0; i = nodes_[i].sibling()) { + ++num_siblings; + } + + id_type hash_id; + id_type match_id = find_node(node_id, &hash_id); + if (match_id != 0) { + is_intersections_.set(match_id, true); + } else { + id_type unit_id = 0; + for (id_type i = 0; i < num_siblings; ++i) { + unit_id = append_unit(); + } + for (id_type i = node_id; i != 0; i = nodes_[i].sibling()) { + units_[unit_id] = nodes_[i].unit(); + labels_[unit_id] = nodes_[i].label(); + --unit_id; + } + match_id = unit_id + 1; + table_[hash_id] = match_id; + ++num_states_; + } + + for (id_type i = node_id, next; i != 0; i = next) { + next = nodes_[i].sibling(); + free_node(i); + } + + nodes_[node_stack_.top()].set_child(match_id); + } + node_stack_.pop(); +} + +inline void DawgBuilder::expand_table() { + std::size_t table_size = table_.size() << 1; + table_.clear(); + table_.resize(table_size, 0); + + for (std::size_t i = 1; i 
< units_.size(); ++i) { + id_type id = static_cast(i); + if (labels_[id] == '\0' || units_[id].is_state()) { + id_type hash_id; + find_unit(id, &hash_id); + table_[hash_id] = id; + } + } +} + +inline id_type DawgBuilder::find_unit(id_type id, id_type *hash_id) const { + *hash_id = hash_unit(id) % table_.size(); + for ( ; ; *hash_id = (*hash_id + 1) % table_.size()) { + id_type unit_id = table_[*hash_id]; + if (unit_id == 0) { + break; + } + + // There must not be the same unit. + } + return 0; +} + +inline id_type DawgBuilder::find_node(id_type node_id, + id_type *hash_id) const { + *hash_id = hash_node(node_id) % table_.size(); + for ( ; ; *hash_id = (*hash_id + 1) % table_.size()) { + id_type unit_id = table_[*hash_id]; + if (unit_id == 0) { + break; + } + + if (are_equal(node_id, unit_id)) { + return unit_id; + } + } + return 0; +} + +inline bool DawgBuilder::are_equal(id_type node_id, id_type unit_id) const { + for (id_type i = nodes_[node_id].sibling(); i != 0; + i = nodes_[i].sibling()) { + if (units_[unit_id].has_sibling() == false) { + return false; + } + ++unit_id; + } + if (units_[unit_id].has_sibling() == true) { + return false; + } + + for (id_type i = node_id; i != 0; i = nodes_[i].sibling(), --unit_id) { + if (nodes_[i].unit() != units_[unit_id].unit() || + nodes_[i].label() != labels_[unit_id]) { + return false; + } + } + return true; +} + +inline id_type DawgBuilder::hash_unit(id_type id) const { + id_type hash_value = 0; + for ( ; id != 0; ++id) { + id_type unit = units_[id].unit(); + uchar_type label = labels_[id]; + hash_value ^= hash((label << 24) ^ unit); + + if (units_[id].has_sibling() == false) { + break; + } + } + return hash_value; +} + +inline id_type DawgBuilder::hash_node(id_type id) const { + id_type hash_value = 0; + for ( ; id != 0; id = nodes_[id].sibling()) { + id_type unit = nodes_[id].unit(); + uchar_type label = nodes_[id].label(); + hash_value ^= hash((label << 24) ^ unit); + } + return hash_value; +} + +inline id_type 
DawgBuilder::append_unit() { + is_intersections_.append(); + units_.append(); + labels_.append(); + + return static_cast(is_intersections_.size() - 1); +} + +inline id_type DawgBuilder::append_node() { + id_type id; + if (recycle_bin_.empty()) { + id = static_cast(nodes_.size()); + nodes_.append(); + } else { + id = recycle_bin_.top(); + nodes_[id] = DawgNode(); + recycle_bin_.pop(); + } + return id; +} + +// +// Unit of double-array builder. +// + +class DoubleArrayBuilderUnit { + public: + DoubleArrayBuilderUnit() : unit_(0) {} + + void set_has_leaf(bool has_leaf) { + if (has_leaf) { + unit_ |= 1U << 8; + } else { + unit_ &= ~(1U << 8); + } + } + void set_value(value_type value) { + unit_ = value | (1U << 31); + } + void set_label(uchar_type label) { + unit_ = (unit_ & ~0xFFU) | label; + } + void set_offset(id_type offset) { + if (offset >= 1U << 29) { + DARTS_THROW("failed to modify unit: too large offset"); + } + unit_ &= (1U << 31) | (1U << 8) | 0xFF; + if (offset < 1U << 21) { + unit_ |= (offset << 10); + } else { + unit_ |= (offset << 2) | (1U << 9); + } + } + + private: + id_type unit_; + + // Copyable. +}; + +// +// Extra unit of double-array builder. +// + +class DoubleArrayBuilderExtraUnit { + public: + DoubleArrayBuilderExtraUnit() : prev_(0), next_(0), + is_fixed_(false), is_used_(false) {} + + void set_prev(id_type prev) { + prev_ = prev; + } + void set_next(id_type next) { + next_ = next; + } + void set_is_fixed(bool is_fixed) { + is_fixed_ = is_fixed; + } + void set_is_used(bool is_used) { + is_used_ = is_used; + } + + id_type prev() const { + return prev_; + } + id_type next() const { + return next_; + } + bool is_fixed() const { + return is_fixed_; + } + bool is_used() const { + return is_used_; + } + + private: + id_type prev_; + id_type next_; + bool is_fixed_; + bool is_used_; + + // Copyable. +}; + +// +// DAWG -> double-array converter. 
+// + +class DoubleArrayBuilder { + public: + explicit DoubleArrayBuilder(progress_func_type progress_func) + : progress_func_(progress_func), units_(), extras_(), labels_(), + table_(), extras_head_(0) {} + ~DoubleArrayBuilder() { + clear(); + } + + template + void build(const Keyset &keyset); + void copy(std::size_t *size_ptr, DoubleArrayUnit **buf_ptr) const; + + void clear(); + + private: + enum { BLOCK_SIZE = 256 }; + enum { NUM_EXTRA_BLOCKS = 16 }; + enum { NUM_EXTRAS = BLOCK_SIZE * NUM_EXTRA_BLOCKS }; + + enum { UPPER_MASK = 0xFF << 21 }; + enum { LOWER_MASK = 0xFF }; + + typedef DoubleArrayBuilderUnit unit_type; + typedef DoubleArrayBuilderExtraUnit extra_type; + + progress_func_type progress_func_; + AutoPool units_; + AutoArray extras_; + AutoPool labels_; + AutoArray table_; + id_type extras_head_; + + // Disallows copy and assignment. + DoubleArrayBuilder(const DoubleArrayBuilder &); + DoubleArrayBuilder &operator=(const DoubleArrayBuilder &); + + std::size_t num_blocks() const { + return units_.size() / BLOCK_SIZE; + } + + const extra_type &extras(id_type id) const { + return extras_[id % NUM_EXTRAS]; + } + extra_type &extras(id_type id) { + return extras_[id % NUM_EXTRAS]; + } + + template + void build_dawg(const Keyset &keyset, DawgBuilder *dawg_builder); + void build_from_dawg(const DawgBuilder &dawg); + void build_from_dawg(const DawgBuilder &dawg, + id_type dawg_id, id_type dic_id); + id_type arrange_from_dawg(const DawgBuilder &dawg, + id_type dawg_id, id_type dic_id); + + template + void build_from_keyset(const Keyset &keyset); + template + void build_from_keyset(const Keyset &keyset, std::size_t begin, + std::size_t end, std::size_t depth, id_type dic_id); + template + id_type arrange_from_keyset(const Keyset &keyset, std::size_t begin, + std::size_t end, std::size_t depth, id_type dic_id); + + id_type find_valid_offset(id_type id) const; + bool is_valid_offset(id_type id, id_type offset) const; + + void reserve_id(id_type id); + void 
expand_units(); + + void fix_all_blocks(); + void fix_block(id_type block_id); +}; + +template +void DoubleArrayBuilder::build(const Keyset &keyset) { + if (keyset.has_values()) { + Details::DawgBuilder dawg_builder; + build_dawg(keyset, &dawg_builder); + build_from_dawg(dawg_builder); + dawg_builder.clear(); + } else { + build_from_keyset(keyset); + } +} + +inline void DoubleArrayBuilder::copy(std::size_t *size_ptr, + DoubleArrayUnit **buf_ptr) const { + if (size_ptr != NULL) { + *size_ptr = units_.size(); + } + if (buf_ptr != NULL) { + *buf_ptr = new DoubleArrayUnit[units_.size()]; + unit_type *units = reinterpret_cast(*buf_ptr); + for (std::size_t i = 0; i < units_.size(); ++i) { + units[i] = units_[i]; + } + } +} + +inline void DoubleArrayBuilder::clear() { + units_.clear(); + extras_.clear(); + labels_.clear(); + table_.clear(); + extras_head_ = 0; +} + +template +void DoubleArrayBuilder::build_dawg(const Keyset &keyset, + DawgBuilder *dawg_builder) { + dawg_builder->init(); + for (std::size_t i = 0; i < keyset.num_keys(); ++i) { + dawg_builder->insert(keyset.keys(i), keyset.lengths(i), keyset.values(i)); + if (progress_func_ != NULL) { + progress_func_(i + 1, keyset.num_keys() + 1); + } + } + dawg_builder->finish(); +} + +inline void DoubleArrayBuilder::build_from_dawg(const DawgBuilder &dawg) { + std::size_t num_units = 1; + while (num_units < dawg.size()) { + num_units <<= 1; + } + units_.reserve(num_units); + + table_.reset(new id_type[dawg.num_intersections()]); + for (std::size_t i = 0; i < dawg.num_intersections(); ++i) { + table_[i] = 0; + } + + extras_.reset(new extra_type[NUM_EXTRAS]); + + reserve_id(0); + extras(0).set_is_used(true); + units_[0].set_offset(1); + units_[0].set_label('\0'); + + if (dawg.child(dawg.root()) != 0) { + build_from_dawg(dawg, dawg.root(), 0); + } + + fix_all_blocks(); + + extras_.clear(); + labels_.clear(); + table_.clear(); +} + +inline void DoubleArrayBuilder::build_from_dawg(const DawgBuilder &dawg, + id_type dawg_id, 
id_type dic_id) { + id_type dawg_child_id = dawg.child(dawg_id); + if (dawg.is_intersection(dawg_child_id)) { + id_type intersection_id = dawg.intersection_id(dawg_child_id); + id_type offset = table_[intersection_id]; + if (offset != 0) { + offset ^= dic_id; + if (!(offset & UPPER_MASK) || !(offset & LOWER_MASK)) { + if (dawg.is_leaf(dawg_child_id)) { + units_[dic_id].set_has_leaf(true); + } + units_[dic_id].set_offset(offset); + return; + } + } + } + + id_type offset = arrange_from_dawg(dawg, dawg_id, dic_id); + if (dawg.is_intersection(dawg_child_id)) { + table_[dawg.intersection_id(dawg_child_id)] = offset; + } + + do { + uchar_type child_label = dawg.label(dawg_child_id); + id_type dic_child_id = offset ^ child_label; + if (child_label != '\0') { + build_from_dawg(dawg, dawg_child_id, dic_child_id); + } + dawg_child_id = dawg.sibling(dawg_child_id); + } while (dawg_child_id != 0); +} + +inline id_type DoubleArrayBuilder::arrange_from_dawg(const DawgBuilder &dawg, + id_type dawg_id, id_type dic_id) { + labels_.resize(0); + + id_type dawg_child_id = dawg.child(dawg_id); + while (dawg_child_id != 0) { + labels_.append(dawg.label(dawg_child_id)); + dawg_child_id = dawg.sibling(dawg_child_id); + } + + id_type offset = find_valid_offset(dic_id); + units_[dic_id].set_offset(dic_id ^ offset); + + dawg_child_id = dawg.child(dawg_id); + for (std::size_t i = 0; i < labels_.size(); ++i) { + id_type dic_child_id = offset ^ labels_[i]; + reserve_id(dic_child_id); + + if (dawg.is_leaf(dawg_child_id)) { + units_[dic_id].set_has_leaf(true); + units_[dic_child_id].set_value(dawg.value(dawg_child_id)); + } else { + units_[dic_child_id].set_label(labels_[i]); + } + + dawg_child_id = dawg.sibling(dawg_child_id); + } + extras(offset).set_is_used(true); + + return offset; +} + +template +void DoubleArrayBuilder::build_from_keyset(const Keyset &keyset) { + std::size_t num_units = 1; + while (num_units < keyset.num_keys()) { + num_units <<= 1; + } + units_.reserve(num_units); + + 
extras_.reset(new extra_type[NUM_EXTRAS]); + + reserve_id(0); + extras(0).set_is_used(true); + units_[0].set_offset(1); + units_[0].set_label('\0'); + + if (keyset.num_keys() > 0) { + build_from_keyset(keyset, 0, keyset.num_keys(), 0, 0); + } + + fix_all_blocks(); + + extras_.clear(); + labels_.clear(); +} + +template +void DoubleArrayBuilder::build_from_keyset(const Keyset &keyset, + std::size_t begin, std::size_t end, std::size_t depth, id_type dic_id) { + id_type offset = arrange_from_keyset(keyset, begin, end, depth, dic_id); + + while (begin < end) { + if (keyset.keys(begin, depth) != '\0') { + break; + } + ++begin; + } + if (begin == end) { + return; + } + + std::size_t last_begin = begin; + uchar_type last_label = keyset.keys(begin, depth); + while (++begin < end) { + uchar_type label = keyset.keys(begin, depth); + if (label != last_label) { + build_from_keyset(keyset, last_begin, begin, + depth + 1, offset ^ last_label); + last_begin = begin; + last_label = keyset.keys(begin, depth); + } + } + build_from_keyset(keyset, last_begin, end, depth + 1, offset ^ last_label); +} + +template +id_type DoubleArrayBuilder::arrange_from_keyset(const Keyset &keyset, + std::size_t begin, std::size_t end, std::size_t depth, id_type dic_id) { + labels_.resize(0); + + value_type value = -1; + for (std::size_t i = begin; i < end; ++i) { + uchar_type label = keyset.keys(i, depth); + if (label == '\0') { + if (keyset.has_lengths() && depth < keyset.lengths(i)) { + DARTS_THROW("failed to build double-array: " + "invalid null character"); + } else if (keyset.values(i) < 0) { + DARTS_THROW("failed to build double-array: negative value"); + } + + if (value == -1) { + value = keyset.values(i); + } + if (progress_func_ != NULL) { + progress_func_(i + 1, keyset.num_keys() + 1); + } + } + + if (labels_.empty()) { + labels_.append(label); + } else if (label != labels_[labels_.size() - 1]) { + if (label < labels_[labels_.size() - 1]) { + DARTS_THROW("failed to build double-array: wrong 
key order"); + } + labels_.append(label); + } + } + + id_type offset = find_valid_offset(dic_id); + units_[dic_id].set_offset(dic_id ^ offset); + + for (std::size_t i = 0; i < labels_.size(); ++i) { + id_type dic_child_id = offset ^ labels_[i]; + reserve_id(dic_child_id); + if (labels_[i] == '\0') { + units_[dic_id].set_has_leaf(true); + units_[dic_child_id].set_value(value); + } else { + units_[dic_child_id].set_label(labels_[i]); + } + } + extras(offset).set_is_used(true); + + return offset; +} + +inline id_type DoubleArrayBuilder::find_valid_offset(id_type id) const { + if (extras_head_ >= units_.size()) { + return units_.size() | (id & LOWER_MASK); + } + + id_type unfixed_id = extras_head_; + do { + id_type offset = unfixed_id ^ labels_[0]; + if (is_valid_offset(id, offset)) { + return offset; + } + unfixed_id = extras(unfixed_id).next(); + } while (unfixed_id != extras_head_); + + return units_.size() | (id & LOWER_MASK); +} + +inline bool DoubleArrayBuilder::is_valid_offset(id_type id, + id_type offset) const { + if (extras(offset).is_used()) { + return false; + } + + id_type rel_offset = id ^ offset; + if ((rel_offset & LOWER_MASK) && (rel_offset & UPPER_MASK)) { + return false; + } + + for (std::size_t i = 1; i < labels_.size(); ++i) { + if (extras(offset ^ labels_[i]).is_fixed()) { + return false; + } + } + + return true; +} + +inline void DoubleArrayBuilder::reserve_id(id_type id) { + if (id >= units_.size()) { + expand_units(); + } + + if (id == extras_head_) { + extras_head_ = extras(id).next(); + if (extras_head_ == id) { + extras_head_ = units_.size(); + } + } + extras(extras(id).prev()).set_next(extras(id).next()); + extras(extras(id).next()).set_prev(extras(id).prev()); + extras(id).set_is_fixed(true); +} + +inline void DoubleArrayBuilder::expand_units() { + id_type src_num_units = units_.size(); + id_type src_num_blocks = num_blocks(); + + id_type dest_num_units = src_num_units + BLOCK_SIZE; + id_type dest_num_blocks = src_num_blocks + 1; + + if 
(dest_num_blocks > NUM_EXTRA_BLOCKS) { + fix_block(src_num_blocks - NUM_EXTRA_BLOCKS); + } + + units_.resize(dest_num_units); + + if (dest_num_blocks > NUM_EXTRA_BLOCKS) { + for (std::size_t id = src_num_units; id < dest_num_units; ++id) { + extras(id).set_is_used(false); + extras(id).set_is_fixed(false); + } + } + + for (id_type i = src_num_units + 1; i < dest_num_units; ++i) { + extras(i - 1).set_next(i); + extras(i).set_prev(i - 1); + } + + extras(src_num_units).set_prev(dest_num_units - 1); + extras(dest_num_units - 1).set_next(src_num_units); + + extras(src_num_units).set_prev(extras(extras_head_).prev()); + extras(dest_num_units - 1).set_next(extras_head_); + + extras(extras(extras_head_).prev()).set_next(src_num_units); + extras(extras_head_).set_prev(dest_num_units - 1); +} + +inline void DoubleArrayBuilder::fix_all_blocks() { + id_type begin = 0; + if (num_blocks() > NUM_EXTRA_BLOCKS) { + begin = num_blocks() - NUM_EXTRA_BLOCKS; + } + id_type end = num_blocks(); + + for (id_type block_id = begin; block_id != end; ++block_id) { + fix_block(block_id); + } +} + +inline void DoubleArrayBuilder::fix_block(id_type block_id) { + id_type begin = block_id * BLOCK_SIZE; + id_type end = begin + BLOCK_SIZE; + + id_type unused_offset = 0; + for (id_type offset = begin; offset != end; ++offset) { + if (!extras(offset).is_used()) { + unused_offset = offset; + break; + } + } + + for (id_type id = begin; id != end; ++id) { + if (!extras(id).is_fixed()) { + reserve_id(id); + units_[id].set_label(static_cast(id ^ unused_offset)); + } + } +} + +} // namespace Details + +// +// Member function build() of DoubleArrayImpl. 
+// + +template +int DoubleArrayImpl::build(std::size_t num_keys, + const key_type * const *keys, const std::size_t *lengths, + const value_type *values, Details::progress_func_type progress_func) { + Details::Keyset keyset(num_keys, keys, lengths, values); + + Details::DoubleArrayBuilder builder(progress_func); + builder.build(keyset); + + std::size_t size = 0; + unit_type *buf = NULL; + builder.copy(&size, &buf); + + clear(); + + size_ = size; + array_ = buf; + buf_ = buf; + + if (progress_func != NULL) { + progress_func(num_keys + 1, num_keys + 1); + } + + return 0; +} + +} // namespace Darts + +#undef DARTS_INT_TO_STR +#undef DARTS_LINE_TO_STR +#undef DARTS_LINE_STR +#undef DARTS_THROW + +#endif // DARTS_H_ diff --git a/thirdparty/json.hpp b/thirdparty/json.hpp new file mode 100644 index 000000000..4d1a37ad7 --- /dev/null +++ b/thirdparty/json.hpp @@ -0,0 +1,24596 @@ +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-License-Identifier: MIT + +/****************************************************************************\ + * Note on documentation: The source files contain links to the online * + * documentation of the public API at https://json.nlohmann.me. This URL * + * contains the most recent documentation and should also be applicable to * + * previous versions; documentation for deprecated functions is not * + * removed, but marked deprecated. See "Generate documentation" section in * + * file docs/README.md. 
* +\****************************************************************************/ + +#ifndef INCLUDE_NLOHMANN_JSON_HPP_ +#define INCLUDE_NLOHMANN_JSON_HPP_ + +#include // all_of, find, for_each +#include // nullptr_t, ptrdiff_t, size_t +#include // hash, less +#include // initializer_list +#ifndef JSON_NO_IO + #include // istream, ostream +#endif // JSON_NO_IO +#include // random_access_iterator_tag +#include // unique_ptr +#include // accumulate +#include // string, stoi, to_string +#include // declval, forward, move, pair, swap +#include // vector + +// #include +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-License-Identifier: MIT + + + +#include + +// #include +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-License-Identifier: MIT + + + +// This file contains all macro definitions affecting or depending on the ABI + +#ifndef JSON_SKIP_LIBRARY_VERSION_CHECK + #if defined(NLOHMANN_JSON_VERSION_MAJOR) && defined(NLOHMANN_JSON_VERSION_MINOR) && defined(NLOHMANN_JSON_VERSION_PATCH) + #if NLOHMANN_JSON_VERSION_MAJOR != 3 || NLOHMANN_JSON_VERSION_MINOR != 11 || NLOHMANN_JSON_VERSION_PATCH != 2 + #warning "Already included a different version of the library!" 
+ #endif + #endif +#endif + +#define NLOHMANN_JSON_VERSION_MAJOR 3 // NOLINT(modernize-macro-to-enum) +#define NLOHMANN_JSON_VERSION_MINOR 11 // NOLINT(modernize-macro-to-enum) +#define NLOHMANN_JSON_VERSION_PATCH 2 // NOLINT(modernize-macro-to-enum) + +#ifndef JSON_DIAGNOSTICS + #define JSON_DIAGNOSTICS 0 +#endif + +#ifndef JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON + #define JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON 0 +#endif + +#if JSON_DIAGNOSTICS + #define NLOHMANN_JSON_ABI_TAG_DIAGNOSTICS _diag +#else + #define NLOHMANN_JSON_ABI_TAG_DIAGNOSTICS +#endif + +#if JSON_USE_LEGACY_DISCARDED_VALUE_COMPARISON + #define NLOHMANN_JSON_ABI_TAG_LEGACY_DISCARDED_VALUE_COMPARISON _ldvcmp +#else + #define NLOHMANN_JSON_ABI_TAG_LEGACY_DISCARDED_VALUE_COMPARISON +#endif + +#ifndef NLOHMANN_JSON_NAMESPACE_NO_VERSION + #define NLOHMANN_JSON_NAMESPACE_NO_VERSION 0 +#endif + +// Construct the namespace ABI tags component +#define NLOHMANN_JSON_ABI_TAGS_CONCAT_EX(a, b) json_abi ## a ## b +#define NLOHMANN_JSON_ABI_TAGS_CONCAT(a, b) \ + NLOHMANN_JSON_ABI_TAGS_CONCAT_EX(a, b) + +#define NLOHMANN_JSON_ABI_TAGS \ + NLOHMANN_JSON_ABI_TAGS_CONCAT( \ + NLOHMANN_JSON_ABI_TAG_DIAGNOSTICS, \ + NLOHMANN_JSON_ABI_TAG_LEGACY_DISCARDED_VALUE_COMPARISON) + +// Construct the namespace version component +#define NLOHMANN_JSON_NAMESPACE_VERSION_CONCAT_EX(major, minor, patch) \ + _v ## major ## _ ## minor ## _ ## patch +#define NLOHMANN_JSON_NAMESPACE_VERSION_CONCAT(major, minor, patch) \ + NLOHMANN_JSON_NAMESPACE_VERSION_CONCAT_EX(major, minor, patch) + +#if NLOHMANN_JSON_NAMESPACE_NO_VERSION +#define NLOHMANN_JSON_NAMESPACE_VERSION +#else +#define NLOHMANN_JSON_NAMESPACE_VERSION \ + NLOHMANN_JSON_NAMESPACE_VERSION_CONCAT(NLOHMANN_JSON_VERSION_MAJOR, \ + NLOHMANN_JSON_VERSION_MINOR, \ + NLOHMANN_JSON_VERSION_PATCH) +#endif + +// Combine namespace components +#define NLOHMANN_JSON_NAMESPACE_CONCAT_EX(a, b) a ## b +#define NLOHMANN_JSON_NAMESPACE_CONCAT(a, b) \ + NLOHMANN_JSON_NAMESPACE_CONCAT_EX(a, 
b) + +#ifndef NLOHMANN_JSON_NAMESPACE +#define NLOHMANN_JSON_NAMESPACE \ + nlohmann::NLOHMANN_JSON_NAMESPACE_CONCAT( \ + NLOHMANN_JSON_ABI_TAGS, \ + NLOHMANN_JSON_NAMESPACE_VERSION) +#endif + +#ifndef NLOHMANN_JSON_NAMESPACE_BEGIN +#define NLOHMANN_JSON_NAMESPACE_BEGIN \ + namespace nlohmann \ + { \ + inline namespace NLOHMANN_JSON_NAMESPACE_CONCAT( \ + NLOHMANN_JSON_ABI_TAGS, \ + NLOHMANN_JSON_NAMESPACE_VERSION) \ + { +#endif + +#ifndef NLOHMANN_JSON_NAMESPACE_END +#define NLOHMANN_JSON_NAMESPACE_END \ + } /* namespace (inline namespace) NOLINT(readability/namespace) */ \ + } // namespace nlohmann +#endif + +// #include +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-License-Identifier: MIT + + + +#include // transform +#include // array +#include // forward_list +#include // inserter, front_inserter, end +#include // map +#include // string +#include // tuple, make_tuple +#include // is_arithmetic, is_same, is_enum, underlying_type, is_convertible +#include // unordered_map +#include // pair, declval +#include // valarray + +// #include +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-License-Identifier: MIT + + + +#include // nullptr_t +#include // exception +#include // runtime_error +#include // to_string +#include // vector + +// #include +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-License-Identifier: MIT + + + +#include // array +#include // size_t +#include // uint8_t +#include // string + +// #include +// __ 
_____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-License-Identifier: MIT + + + +#include // declval, pair +// #include +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-License-Identifier: MIT + + + +#include + +// #include +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-License-Identifier: MIT + + + +// #include + + +NLOHMANN_JSON_NAMESPACE_BEGIN +namespace detail +{ + +template struct make_void +{ + using type = void; +}; +template using void_t = typename make_void::type; + +} // namespace detail +NLOHMANN_JSON_NAMESPACE_END + + +NLOHMANN_JSON_NAMESPACE_BEGIN +namespace detail +{ + +// https://en.cppreference.com/w/cpp/experimental/is_detected +struct nonesuch +{ + nonesuch() = delete; + ~nonesuch() = delete; + nonesuch(nonesuch const&) = delete; + nonesuch(nonesuch const&&) = delete; + void operator=(nonesuch const&) = delete; + void operator=(nonesuch&&) = delete; +}; + +template class Op, + class... Args> +struct detector +{ + using value_t = std::false_type; + using type = Default; +}; + +template class Op, class... Args> +struct detector>, Op, Args...> +{ + using value_t = std::true_type; + using type = Op; +}; + +template class Op, class... Args> +using is_detected = typename detector::value_t; + +template class Op, class... Args> +struct is_detected_lazy : is_detected { }; + +template class Op, class... Args> +using detected_t = typename detector::type; + +template class Op, class... 
Args> +using detected_or = detector; + +template class Op, class... Args> +using detected_or_t = typename detected_or::type; + +template class Op, class... Args> +using is_detected_exact = std::is_same>; + +template class Op, class... Args> +using is_detected_convertible = + std::is_convertible, To>; + +} // namespace detail +NLOHMANN_JSON_NAMESPACE_END + +// #include + + +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-FileCopyrightText: 2016-2021 Evan Nemerson +// SPDX-License-Identifier: MIT + +/* Hedley - https://nemequ.github.io/hedley + * Created by Evan Nemerson + */ + +#if !defined(JSON_HEDLEY_VERSION) || (JSON_HEDLEY_VERSION < 15) +#if defined(JSON_HEDLEY_VERSION) + #undef JSON_HEDLEY_VERSION +#endif +#define JSON_HEDLEY_VERSION 15 + +#if defined(JSON_HEDLEY_STRINGIFY_EX) + #undef JSON_HEDLEY_STRINGIFY_EX +#endif +#define JSON_HEDLEY_STRINGIFY_EX(x) #x + +#if defined(JSON_HEDLEY_STRINGIFY) + #undef JSON_HEDLEY_STRINGIFY +#endif +#define JSON_HEDLEY_STRINGIFY(x) JSON_HEDLEY_STRINGIFY_EX(x) + +#if defined(JSON_HEDLEY_CONCAT_EX) + #undef JSON_HEDLEY_CONCAT_EX +#endif +#define JSON_HEDLEY_CONCAT_EX(a,b) a##b + +#if defined(JSON_HEDLEY_CONCAT) + #undef JSON_HEDLEY_CONCAT +#endif +#define JSON_HEDLEY_CONCAT(a,b) JSON_HEDLEY_CONCAT_EX(a,b) + +#if defined(JSON_HEDLEY_CONCAT3_EX) + #undef JSON_HEDLEY_CONCAT3_EX +#endif +#define JSON_HEDLEY_CONCAT3_EX(a,b,c) a##b##c + +#if defined(JSON_HEDLEY_CONCAT3) + #undef JSON_HEDLEY_CONCAT3 +#endif +#define JSON_HEDLEY_CONCAT3(a,b,c) JSON_HEDLEY_CONCAT3_EX(a,b,c) + +#if defined(JSON_HEDLEY_VERSION_ENCODE) + #undef JSON_HEDLEY_VERSION_ENCODE +#endif +#define JSON_HEDLEY_VERSION_ENCODE(major,minor,revision) (((major) * 1000000) + ((minor) * 1000) + (revision)) + +#if defined(JSON_HEDLEY_VERSION_DECODE_MAJOR) + #undef JSON_HEDLEY_VERSION_DECODE_MAJOR 
+#endif +#define JSON_HEDLEY_VERSION_DECODE_MAJOR(version) ((version) / 1000000) + +#if defined(JSON_HEDLEY_VERSION_DECODE_MINOR) + #undef JSON_HEDLEY_VERSION_DECODE_MINOR +#endif +#define JSON_HEDLEY_VERSION_DECODE_MINOR(version) (((version) % 1000000) / 1000) + +#if defined(JSON_HEDLEY_VERSION_DECODE_REVISION) + #undef JSON_HEDLEY_VERSION_DECODE_REVISION +#endif +#define JSON_HEDLEY_VERSION_DECODE_REVISION(version) ((version) % 1000) + +#if defined(JSON_HEDLEY_GNUC_VERSION) + #undef JSON_HEDLEY_GNUC_VERSION +#endif +#if defined(__GNUC__) && defined(__GNUC_PATCHLEVEL__) + #define JSON_HEDLEY_GNUC_VERSION JSON_HEDLEY_VERSION_ENCODE(__GNUC__, __GNUC_MINOR__, __GNUC_PATCHLEVEL__) +#elif defined(__GNUC__) + #define JSON_HEDLEY_GNUC_VERSION JSON_HEDLEY_VERSION_ENCODE(__GNUC__, __GNUC_MINOR__, 0) +#endif + +#if defined(JSON_HEDLEY_GNUC_VERSION_CHECK) + #undef JSON_HEDLEY_GNUC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_GNUC_VERSION) + #define JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_GNUC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_MSVC_VERSION) + #undef JSON_HEDLEY_MSVC_VERSION +#endif +#if defined(_MSC_FULL_VER) && (_MSC_FULL_VER >= 140000000) && !defined(__ICL) + #define JSON_HEDLEY_MSVC_VERSION JSON_HEDLEY_VERSION_ENCODE(_MSC_FULL_VER / 10000000, (_MSC_FULL_VER % 10000000) / 100000, (_MSC_FULL_VER % 100000) / 100) +#elif defined(_MSC_FULL_VER) && !defined(__ICL) + #define JSON_HEDLEY_MSVC_VERSION JSON_HEDLEY_VERSION_ENCODE(_MSC_FULL_VER / 1000000, (_MSC_FULL_VER % 1000000) / 10000, (_MSC_FULL_VER % 10000) / 10) +#elif defined(_MSC_VER) && !defined(__ICL) + #define JSON_HEDLEY_MSVC_VERSION JSON_HEDLEY_VERSION_ENCODE(_MSC_VER / 100, _MSC_VER % 100, 0) +#endif + +#if defined(JSON_HEDLEY_MSVC_VERSION_CHECK) + #undef JSON_HEDLEY_MSVC_VERSION_CHECK +#endif +#if !defined(JSON_HEDLEY_MSVC_VERSION) + #define 
JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (0) +#elif defined(_MSC_VER) && (_MSC_VER >= 1400) + #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (_MSC_FULL_VER >= ((major * 10000000) + (minor * 100000) + (patch))) +#elif defined(_MSC_VER) && (_MSC_VER >= 1200) + #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (_MSC_FULL_VER >= ((major * 1000000) + (minor * 10000) + (patch))) +#else + #define JSON_HEDLEY_MSVC_VERSION_CHECK(major,minor,patch) (_MSC_VER >= ((major * 100) + (minor))) +#endif + +#if defined(JSON_HEDLEY_INTEL_VERSION) + #undef JSON_HEDLEY_INTEL_VERSION +#endif +#if defined(__INTEL_COMPILER) && defined(__INTEL_COMPILER_UPDATE) && !defined(__ICL) + #define JSON_HEDLEY_INTEL_VERSION JSON_HEDLEY_VERSION_ENCODE(__INTEL_COMPILER / 100, __INTEL_COMPILER % 100, __INTEL_COMPILER_UPDATE) +#elif defined(__INTEL_COMPILER) && !defined(__ICL) + #define JSON_HEDLEY_INTEL_VERSION JSON_HEDLEY_VERSION_ENCODE(__INTEL_COMPILER / 100, __INTEL_COMPILER % 100, 0) +#endif + +#if defined(JSON_HEDLEY_INTEL_VERSION_CHECK) + #undef JSON_HEDLEY_INTEL_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_INTEL_VERSION) + #define JSON_HEDLEY_INTEL_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_INTEL_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_INTEL_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_INTEL_CL_VERSION) + #undef JSON_HEDLEY_INTEL_CL_VERSION +#endif +#if defined(__INTEL_COMPILER) && defined(__INTEL_COMPILER_UPDATE) && defined(__ICL) + #define JSON_HEDLEY_INTEL_CL_VERSION JSON_HEDLEY_VERSION_ENCODE(__INTEL_COMPILER, __INTEL_COMPILER_UPDATE, 0) +#endif + +#if defined(JSON_HEDLEY_INTEL_CL_VERSION_CHECK) + #undef JSON_HEDLEY_INTEL_CL_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_INTEL_CL_VERSION) + #define JSON_HEDLEY_INTEL_CL_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_INTEL_CL_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define 
JSON_HEDLEY_INTEL_CL_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_PGI_VERSION) + #undef JSON_HEDLEY_PGI_VERSION +#endif +#if defined(__PGI) && defined(__PGIC__) && defined(__PGIC_MINOR__) && defined(__PGIC_PATCHLEVEL__) + #define JSON_HEDLEY_PGI_VERSION JSON_HEDLEY_VERSION_ENCODE(__PGIC__, __PGIC_MINOR__, __PGIC_PATCHLEVEL__) +#endif + +#if defined(JSON_HEDLEY_PGI_VERSION_CHECK) + #undef JSON_HEDLEY_PGI_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_PGI_VERSION) + #define JSON_HEDLEY_PGI_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_PGI_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_PGI_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_SUNPRO_VERSION) + #undef JSON_HEDLEY_SUNPRO_VERSION +#endif +#if defined(__SUNPRO_C) && (__SUNPRO_C > 0x1000) + #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((((__SUNPRO_C >> 16) & 0xf) * 10) + ((__SUNPRO_C >> 12) & 0xf), (((__SUNPRO_C >> 8) & 0xf) * 10) + ((__SUNPRO_C >> 4) & 0xf), (__SUNPRO_C & 0xf) * 10) +#elif defined(__SUNPRO_C) + #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((__SUNPRO_C >> 8) & 0xf, (__SUNPRO_C >> 4) & 0xf, (__SUNPRO_C) & 0xf) +#elif defined(__SUNPRO_CC) && (__SUNPRO_CC > 0x1000) + #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((((__SUNPRO_CC >> 16) & 0xf) * 10) + ((__SUNPRO_CC >> 12) & 0xf), (((__SUNPRO_CC >> 8) & 0xf) * 10) + ((__SUNPRO_CC >> 4) & 0xf), (__SUNPRO_CC & 0xf) * 10) +#elif defined(__SUNPRO_CC) + #define JSON_HEDLEY_SUNPRO_VERSION JSON_HEDLEY_VERSION_ENCODE((__SUNPRO_CC >> 8) & 0xf, (__SUNPRO_CC >> 4) & 0xf, (__SUNPRO_CC) & 0xf) +#endif + +#if defined(JSON_HEDLEY_SUNPRO_VERSION_CHECK) + #undef JSON_HEDLEY_SUNPRO_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_SUNPRO_VERSION) + #define JSON_HEDLEY_SUNPRO_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_SUNPRO_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define 
JSON_HEDLEY_SUNPRO_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_EMSCRIPTEN_VERSION) + #undef JSON_HEDLEY_EMSCRIPTEN_VERSION +#endif +#if defined(__EMSCRIPTEN__) + #define JSON_HEDLEY_EMSCRIPTEN_VERSION JSON_HEDLEY_VERSION_ENCODE(__EMSCRIPTEN_major__, __EMSCRIPTEN_minor__, __EMSCRIPTEN_tiny__) +#endif + +#if defined(JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK) + #undef JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_EMSCRIPTEN_VERSION) + #define JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_EMSCRIPTEN_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_ARM_VERSION) + #undef JSON_HEDLEY_ARM_VERSION +#endif +#if defined(__CC_ARM) && defined(__ARMCOMPILER_VERSION) + #define JSON_HEDLEY_ARM_VERSION JSON_HEDLEY_VERSION_ENCODE(__ARMCOMPILER_VERSION / 1000000, (__ARMCOMPILER_VERSION % 1000000) / 10000, (__ARMCOMPILER_VERSION % 10000) / 100) +#elif defined(__CC_ARM) && defined(__ARMCC_VERSION) + #define JSON_HEDLEY_ARM_VERSION JSON_HEDLEY_VERSION_ENCODE(__ARMCC_VERSION / 1000000, (__ARMCC_VERSION % 1000000) / 10000, (__ARMCC_VERSION % 10000) / 100) +#endif + +#if defined(JSON_HEDLEY_ARM_VERSION_CHECK) + #undef JSON_HEDLEY_ARM_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_ARM_VERSION) + #define JSON_HEDLEY_ARM_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_ARM_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_ARM_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_IBM_VERSION) + #undef JSON_HEDLEY_IBM_VERSION +#endif +#if defined(__ibmxl__) + #define JSON_HEDLEY_IBM_VERSION JSON_HEDLEY_VERSION_ENCODE(__ibmxl_version__, __ibmxl_release__, __ibmxl_modification__) +#elif defined(__xlC__) && defined(__xlC_ver__) + #define JSON_HEDLEY_IBM_VERSION JSON_HEDLEY_VERSION_ENCODE(__xlC__ >> 8, __xlC__ & 0xff, (__xlC_ver__ >> 8) & 0xff) +#elif 
defined(__xlC__) + #define JSON_HEDLEY_IBM_VERSION JSON_HEDLEY_VERSION_ENCODE(__xlC__ >> 8, __xlC__ & 0xff, 0) +#endif + +#if defined(JSON_HEDLEY_IBM_VERSION_CHECK) + #undef JSON_HEDLEY_IBM_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_IBM_VERSION) + #define JSON_HEDLEY_IBM_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_IBM_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_IBM_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_VERSION) + #undef JSON_HEDLEY_TI_VERSION +#endif +#if \ + defined(__TI_COMPILER_VERSION__) && \ + ( \ + defined(__TMS470__) || defined(__TI_ARM__) || \ + defined(__MSP430__) || \ + defined(__TMS320C2000__) \ + ) +#if (__TI_COMPILER_VERSION__ >= 16000000) + #define JSON_HEDLEY_TI_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif +#endif + +#if defined(JSON_HEDLEY_TI_VERSION_CHECK) + #undef JSON_HEDLEY_TI_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_VERSION) + #define JSON_HEDLEY_TI_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CL2000_VERSION) + #undef JSON_HEDLEY_TI_CL2000_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__TMS320C2000__) + #define JSON_HEDLEY_TI_CL2000_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CL2000_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CL2000_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CL2000_VERSION) + #define JSON_HEDLEY_TI_CL2000_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL2000_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CL2000_VERSION_CHECK(major,minor,patch) (0) 
+#endif + +#if defined(JSON_HEDLEY_TI_CL430_VERSION) + #undef JSON_HEDLEY_TI_CL430_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__MSP430__) + #define JSON_HEDLEY_TI_CL430_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CL430_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CL430_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CL430_VERSION) + #define JSON_HEDLEY_TI_CL430_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL430_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CL430_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_ARMCL_VERSION) + #undef JSON_HEDLEY_TI_ARMCL_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && (defined(__TMS470__) || defined(__TI_ARM__)) + #define JSON_HEDLEY_TI_ARMCL_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_ARMCL_VERSION_CHECK) + #undef JSON_HEDLEY_TI_ARMCL_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_ARMCL_VERSION) + #define JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_ARMCL_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CL6X_VERSION) + #undef JSON_HEDLEY_TI_CL6X_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__TMS320C6X__) + #define JSON_HEDLEY_TI_CL6X_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CL6X_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CL6X_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CL6X_VERSION) + #define 
JSON_HEDLEY_TI_CL6X_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL6X_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CL6X_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CL7X_VERSION) + #undef JSON_HEDLEY_TI_CL7X_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__C7000__) + #define JSON_HEDLEY_TI_CL7X_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CL7X_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CL7X_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CL7X_VERSION) + #define JSON_HEDLEY_TI_CL7X_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CL7X_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CL7X_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TI_CLPRU_VERSION) + #undef JSON_HEDLEY_TI_CLPRU_VERSION +#endif +#if defined(__TI_COMPILER_VERSION__) && defined(__PRU__) + #define JSON_HEDLEY_TI_CLPRU_VERSION JSON_HEDLEY_VERSION_ENCODE(__TI_COMPILER_VERSION__ / 1000000, (__TI_COMPILER_VERSION__ % 1000000) / 1000, (__TI_COMPILER_VERSION__ % 1000)) +#endif + +#if defined(JSON_HEDLEY_TI_CLPRU_VERSION_CHECK) + #undef JSON_HEDLEY_TI_CLPRU_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TI_CLPRU_VERSION) + #define JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TI_CLPRU_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_CRAY_VERSION) + #undef JSON_HEDLEY_CRAY_VERSION +#endif +#if defined(_CRAYC) + #if defined(_RELEASE_PATCHLEVEL) + #define JSON_HEDLEY_CRAY_VERSION JSON_HEDLEY_VERSION_ENCODE(_RELEASE_MAJOR, _RELEASE_MINOR, _RELEASE_PATCHLEVEL) + #else + #define JSON_HEDLEY_CRAY_VERSION JSON_HEDLEY_VERSION_ENCODE(_RELEASE_MAJOR, _RELEASE_MINOR, 0) + #endif 
+#endif + +#if defined(JSON_HEDLEY_CRAY_VERSION_CHECK) + #undef JSON_HEDLEY_CRAY_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_CRAY_VERSION) + #define JSON_HEDLEY_CRAY_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_CRAY_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_CRAY_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_IAR_VERSION) + #undef JSON_HEDLEY_IAR_VERSION +#endif +#if defined(__IAR_SYSTEMS_ICC__) + #if __VER__ > 1000 + #define JSON_HEDLEY_IAR_VERSION JSON_HEDLEY_VERSION_ENCODE((__VER__ / 1000000), ((__VER__ / 1000) % 1000), (__VER__ % 1000)) + #else + #define JSON_HEDLEY_IAR_VERSION JSON_HEDLEY_VERSION_ENCODE(__VER__ / 100, __VER__ % 100, 0) + #endif +#endif + +#if defined(JSON_HEDLEY_IAR_VERSION_CHECK) + #undef JSON_HEDLEY_IAR_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_IAR_VERSION) + #define JSON_HEDLEY_IAR_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_IAR_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_IAR_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_TINYC_VERSION) + #undef JSON_HEDLEY_TINYC_VERSION +#endif +#if defined(__TINYC__) + #define JSON_HEDLEY_TINYC_VERSION JSON_HEDLEY_VERSION_ENCODE(__TINYC__ / 1000, (__TINYC__ / 100) % 10, __TINYC__ % 100) +#endif + +#if defined(JSON_HEDLEY_TINYC_VERSION_CHECK) + #undef JSON_HEDLEY_TINYC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_TINYC_VERSION) + #define JSON_HEDLEY_TINYC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_TINYC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_TINYC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_DMC_VERSION) + #undef JSON_HEDLEY_DMC_VERSION +#endif +#if defined(__DMC__) + #define JSON_HEDLEY_DMC_VERSION JSON_HEDLEY_VERSION_ENCODE(__DMC__ >> 8, (__DMC__ >> 4) & 0xf, __DMC__ & 0xf) +#endif + +#if defined(JSON_HEDLEY_DMC_VERSION_CHECK) + #undef JSON_HEDLEY_DMC_VERSION_CHECK +#endif 
+#if defined(JSON_HEDLEY_DMC_VERSION) + #define JSON_HEDLEY_DMC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_DMC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_DMC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_COMPCERT_VERSION) + #undef JSON_HEDLEY_COMPCERT_VERSION +#endif +#if defined(__COMPCERT_VERSION__) + #define JSON_HEDLEY_COMPCERT_VERSION JSON_HEDLEY_VERSION_ENCODE(__COMPCERT_VERSION__ / 10000, (__COMPCERT_VERSION__ / 100) % 100, __COMPCERT_VERSION__ % 100) +#endif + +#if defined(JSON_HEDLEY_COMPCERT_VERSION_CHECK) + #undef JSON_HEDLEY_COMPCERT_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_COMPCERT_VERSION) + #define JSON_HEDLEY_COMPCERT_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_COMPCERT_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_COMPCERT_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_PELLES_VERSION) + #undef JSON_HEDLEY_PELLES_VERSION +#endif +#if defined(__POCC__) + #define JSON_HEDLEY_PELLES_VERSION JSON_HEDLEY_VERSION_ENCODE(__POCC__ / 100, __POCC__ % 100, 0) +#endif + +#if defined(JSON_HEDLEY_PELLES_VERSION_CHECK) + #undef JSON_HEDLEY_PELLES_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_PELLES_VERSION) + #define JSON_HEDLEY_PELLES_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_PELLES_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_PELLES_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_MCST_LCC_VERSION) + #undef JSON_HEDLEY_MCST_LCC_VERSION +#endif +#if defined(__LCC__) && defined(__LCC_MINOR__) + #define JSON_HEDLEY_MCST_LCC_VERSION JSON_HEDLEY_VERSION_ENCODE(__LCC__ / 100, __LCC__ % 100, __LCC_MINOR__) +#endif + +#if defined(JSON_HEDLEY_MCST_LCC_VERSION_CHECK) + #undef JSON_HEDLEY_MCST_LCC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_MCST_LCC_VERSION) + #define JSON_HEDLEY_MCST_LCC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_MCST_LCC_VERSION 
>= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_MCST_LCC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_GCC_VERSION) + #undef JSON_HEDLEY_GCC_VERSION +#endif +#if \ + defined(JSON_HEDLEY_GNUC_VERSION) && \ + !defined(__clang__) && \ + !defined(JSON_HEDLEY_INTEL_VERSION) && \ + !defined(JSON_HEDLEY_PGI_VERSION) && \ + !defined(JSON_HEDLEY_ARM_VERSION) && \ + !defined(JSON_HEDLEY_CRAY_VERSION) && \ + !defined(JSON_HEDLEY_TI_VERSION) && \ + !defined(JSON_HEDLEY_TI_ARMCL_VERSION) && \ + !defined(JSON_HEDLEY_TI_CL430_VERSION) && \ + !defined(JSON_HEDLEY_TI_CL2000_VERSION) && \ + !defined(JSON_HEDLEY_TI_CL6X_VERSION) && \ + !defined(JSON_HEDLEY_TI_CL7X_VERSION) && \ + !defined(JSON_HEDLEY_TI_CLPRU_VERSION) && \ + !defined(__COMPCERT__) && \ + !defined(JSON_HEDLEY_MCST_LCC_VERSION) + #define JSON_HEDLEY_GCC_VERSION JSON_HEDLEY_GNUC_VERSION +#endif + +#if defined(JSON_HEDLEY_GCC_VERSION_CHECK) + #undef JSON_HEDLEY_GCC_VERSION_CHECK +#endif +#if defined(JSON_HEDLEY_GCC_VERSION) + #define JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) (JSON_HEDLEY_GCC_VERSION >= JSON_HEDLEY_VERSION_ENCODE(major, minor, patch)) +#else + #define JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) (0) +#endif + +#if defined(JSON_HEDLEY_HAS_ATTRIBUTE) + #undef JSON_HEDLEY_HAS_ATTRIBUTE +#endif +#if \ + defined(__has_attribute) && \ + ( \ + (!defined(JSON_HEDLEY_IAR_VERSION) || JSON_HEDLEY_IAR_VERSION_CHECK(8,5,9)) \ + ) +# define JSON_HEDLEY_HAS_ATTRIBUTE(attribute) __has_attribute(attribute) +#else +# define JSON_HEDLEY_HAS_ATTRIBUTE(attribute) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_ATTRIBUTE) + #undef JSON_HEDLEY_GNUC_HAS_ATTRIBUTE +#endif +#if defined(__has_attribute) + #define JSON_HEDLEY_GNUC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_HAS_ATTRIBUTE(attribute) +#else + #define JSON_HEDLEY_GNUC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if 
defined(JSON_HEDLEY_GCC_HAS_ATTRIBUTE) + #undef JSON_HEDLEY_GCC_HAS_ATTRIBUTE +#endif +#if defined(__has_attribute) + #define JSON_HEDLEY_GCC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_HAS_ATTRIBUTE(attribute) +#else + #define JSON_HEDLEY_GCC_HAS_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_CPP_ATTRIBUTE) + #undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE +#endif +#if \ + defined(__has_cpp_attribute) && \ + defined(__cplusplus) && \ + (!defined(JSON_HEDLEY_SUNPRO_VERSION) || JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0)) + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute) __has_cpp_attribute(attribute) +#else + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute) (0) +#endif + +#if defined(JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS) + #undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS +#endif +#if !defined(__cplusplus) || !defined(__has_cpp_attribute) + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) (0) +#elif \ + !defined(JSON_HEDLEY_PGI_VERSION) && \ + !defined(JSON_HEDLEY_IAR_VERSION) && \ + (!defined(JSON_HEDLEY_SUNPRO_VERSION) || JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0)) && \ + (!defined(JSON_HEDLEY_MSVC_VERSION) || JSON_HEDLEY_MSVC_VERSION_CHECK(19,20,0)) + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) JSON_HEDLEY_HAS_CPP_ATTRIBUTE(ns::attribute) +#else + #define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE) + #undef JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE +#endif +#if defined(__has_cpp_attribute) && defined(__cplusplus) + #define JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) __has_cpp_attribute(attribute) +#else + #define JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE) + #undef JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE +#endif +#if defined(__has_cpp_attribute) && defined(__cplusplus) + #define 
JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) __has_cpp_attribute(attribute) +#else + #define JSON_HEDLEY_GCC_HAS_CPP_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_BUILTIN) + #undef JSON_HEDLEY_HAS_BUILTIN +#endif +#if defined(__has_builtin) + #define JSON_HEDLEY_HAS_BUILTIN(builtin) __has_builtin(builtin) +#else + #define JSON_HEDLEY_HAS_BUILTIN(builtin) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_BUILTIN) + #undef JSON_HEDLEY_GNUC_HAS_BUILTIN +#endif +#if defined(__has_builtin) + #define JSON_HEDLEY_GNUC_HAS_BUILTIN(builtin,major,minor,patch) __has_builtin(builtin) +#else + #define JSON_HEDLEY_GNUC_HAS_BUILTIN(builtin,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_BUILTIN) + #undef JSON_HEDLEY_GCC_HAS_BUILTIN +#endif +#if defined(__has_builtin) + #define JSON_HEDLEY_GCC_HAS_BUILTIN(builtin,major,minor,patch) __has_builtin(builtin) +#else + #define JSON_HEDLEY_GCC_HAS_BUILTIN(builtin,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_FEATURE) + #undef JSON_HEDLEY_HAS_FEATURE +#endif +#if defined(__has_feature) + #define JSON_HEDLEY_HAS_FEATURE(feature) __has_feature(feature) +#else + #define JSON_HEDLEY_HAS_FEATURE(feature) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_FEATURE) + #undef JSON_HEDLEY_GNUC_HAS_FEATURE +#endif +#if defined(__has_feature) + #define JSON_HEDLEY_GNUC_HAS_FEATURE(feature,major,minor,patch) __has_feature(feature) +#else + #define JSON_HEDLEY_GNUC_HAS_FEATURE(feature,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_FEATURE) + #undef JSON_HEDLEY_GCC_HAS_FEATURE +#endif +#if defined(__has_feature) + #define JSON_HEDLEY_GCC_HAS_FEATURE(feature,major,minor,patch) __has_feature(feature) +#else + #define JSON_HEDLEY_GCC_HAS_FEATURE(feature,major,minor,patch) 
JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_EXTENSION) + #undef JSON_HEDLEY_HAS_EXTENSION +#endif +#if defined(__has_extension) + #define JSON_HEDLEY_HAS_EXTENSION(extension) __has_extension(extension) +#else + #define JSON_HEDLEY_HAS_EXTENSION(extension) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_EXTENSION) + #undef JSON_HEDLEY_GNUC_HAS_EXTENSION +#endif +#if defined(__has_extension) + #define JSON_HEDLEY_GNUC_HAS_EXTENSION(extension,major,minor,patch) __has_extension(extension) +#else + #define JSON_HEDLEY_GNUC_HAS_EXTENSION(extension,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_EXTENSION) + #undef JSON_HEDLEY_GCC_HAS_EXTENSION +#endif +#if defined(__has_extension) + #define JSON_HEDLEY_GCC_HAS_EXTENSION(extension,major,minor,patch) __has_extension(extension) +#else + #define JSON_HEDLEY_GCC_HAS_EXTENSION(extension,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE) + #undef JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE +#endif +#if defined(__has_declspec_attribute) + #define JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE(attribute) __has_declspec_attribute(attribute) +#else + #define JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE(attribute) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE) + #undef JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE +#endif +#if defined(__has_declspec_attribute) + #define JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) __has_declspec_attribute(attribute) +#else + #define JSON_HEDLEY_GNUC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE) + #undef JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE +#endif +#if defined(__has_declspec_attribute) + #define JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) 
__has_declspec_attribute(attribute) +#else + #define JSON_HEDLEY_GCC_HAS_DECLSPEC_ATTRIBUTE(attribute,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_HAS_WARNING) + #undef JSON_HEDLEY_HAS_WARNING +#endif +#if defined(__has_warning) + #define JSON_HEDLEY_HAS_WARNING(warning) __has_warning(warning) +#else + #define JSON_HEDLEY_HAS_WARNING(warning) (0) +#endif + +#if defined(JSON_HEDLEY_GNUC_HAS_WARNING) + #undef JSON_HEDLEY_GNUC_HAS_WARNING +#endif +#if defined(__has_warning) + #define JSON_HEDLEY_GNUC_HAS_WARNING(warning,major,minor,patch) __has_warning(warning) +#else + #define JSON_HEDLEY_GNUC_HAS_WARNING(warning,major,minor,patch) JSON_HEDLEY_GNUC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_GCC_HAS_WARNING) + #undef JSON_HEDLEY_GCC_HAS_WARNING +#endif +#if defined(__has_warning) + #define JSON_HEDLEY_GCC_HAS_WARNING(warning,major,minor,patch) __has_warning(warning) +#else + #define JSON_HEDLEY_GCC_HAS_WARNING(warning,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if \ + (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) || \ + defined(__clang__) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(18,4,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,7,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(2,0,1) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,0,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(5,0,0) || \ + JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,17) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(8,0,0) || \ + (JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) && defined(__C99_PRAGMA_OPERATOR)) + #define 
JSON_HEDLEY_PRAGMA(value) _Pragma(#value) +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) + #define JSON_HEDLEY_PRAGMA(value) __pragma(value) +#else + #define JSON_HEDLEY_PRAGMA(value) +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_PUSH) + #undef JSON_HEDLEY_DIAGNOSTIC_PUSH +#endif +#if defined(JSON_HEDLEY_DIAGNOSTIC_POP) + #undef JSON_HEDLEY_DIAGNOSTIC_POP +#endif +#if defined(__clang__) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("clang diagnostic push") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("clang diagnostic pop") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("warning(push)") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("warning(pop)") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,6,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("GCC diagnostic push") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("GCC diagnostic pop") +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH __pragma(warning(push)) + #define JSON_HEDLEY_DIAGNOSTIC_POP __pragma(warning(pop)) +#elif JSON_HEDLEY_ARM_VERSION_CHECK(5,6,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("push") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("pop") +#elif \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,4,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("diag_push") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("diag_pop") +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(2,90,0) + #define JSON_HEDLEY_DIAGNOSTIC_PUSH _Pragma("warning(push)") + #define JSON_HEDLEY_DIAGNOSTIC_POP _Pragma("warning(pop)") +#else + #define JSON_HEDLEY_DIAGNOSTIC_PUSH + #define JSON_HEDLEY_DIAGNOSTIC_POP +#endif + +/* JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_ is for + HEDLEY 
INTERNAL USE ONLY. API subject to change without notice. */ +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_ +#endif +#if defined(__cplusplus) +# if JSON_HEDLEY_HAS_WARNING("-Wc++98-compat") +# if JSON_HEDLEY_HAS_WARNING("-Wc++17-extensions") +# if JSON_HEDLEY_HAS_WARNING("-Wc++1z-extensions") +# define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(xpr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wc++98-compat\"") \ + _Pragma("clang diagnostic ignored \"-Wc++17-extensions\"") \ + _Pragma("clang diagnostic ignored \"-Wc++1z-extensions\"") \ + xpr \ + JSON_HEDLEY_DIAGNOSTIC_POP +# else +# define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(xpr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wc++98-compat\"") \ + _Pragma("clang diagnostic ignored \"-Wc++17-extensions\"") \ + xpr \ + JSON_HEDLEY_DIAGNOSTIC_POP +# endif +# else +# define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(xpr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wc++98-compat\"") \ + xpr \ + JSON_HEDLEY_DIAGNOSTIC_POP +# endif +# endif +#endif +#if !defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(x) x +#endif + +#if defined(JSON_HEDLEY_CONST_CAST) + #undef JSON_HEDLEY_CONST_CAST +#endif +#if defined(__cplusplus) +# define JSON_HEDLEY_CONST_CAST(T, expr) (const_cast(expr)) +#elif \ + JSON_HEDLEY_HAS_WARNING("-Wcast-qual") || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,6,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) +# define JSON_HEDLEY_CONST_CAST(T, expr) (__extension__ ({ \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL \ + ((T) (expr)); \ + JSON_HEDLEY_DIAGNOSTIC_POP \ + })) +#else +# define JSON_HEDLEY_CONST_CAST(T, expr) ((T) (expr)) +#endif + +#if defined(JSON_HEDLEY_REINTERPRET_CAST) + #undef JSON_HEDLEY_REINTERPRET_CAST +#endif +#if 
defined(__cplusplus) + #define JSON_HEDLEY_REINTERPRET_CAST(T, expr) (reinterpret_cast(expr)) +#else + #define JSON_HEDLEY_REINTERPRET_CAST(T, expr) ((T) (expr)) +#endif + +#if defined(JSON_HEDLEY_STATIC_CAST) + #undef JSON_HEDLEY_STATIC_CAST +#endif +#if defined(__cplusplus) + #define JSON_HEDLEY_STATIC_CAST(T, expr) (static_cast(expr)) +#else + #define JSON_HEDLEY_STATIC_CAST(T, expr) ((T) (expr)) +#endif + +#if defined(JSON_HEDLEY_CPP_CAST) + #undef JSON_HEDLEY_CPP_CAST +#endif +#if defined(__cplusplus) +# if JSON_HEDLEY_HAS_WARNING("-Wold-style-cast") +# define JSON_HEDLEY_CPP_CAST(T, expr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wold-style-cast\"") \ + ((T) (expr)) \ + JSON_HEDLEY_DIAGNOSTIC_POP +# elif JSON_HEDLEY_IAR_VERSION_CHECK(8,3,0) +# define JSON_HEDLEY_CPP_CAST(T, expr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("diag_suppress=Pe137") \ + JSON_HEDLEY_DIAGNOSTIC_POP +# else +# define JSON_HEDLEY_CPP_CAST(T, expr) ((T) (expr)) +# endif +#else +# define JSON_HEDLEY_CPP_CAST(T, expr) (expr) +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wdeprecated-declarations") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("clang diagnostic ignored \"-Wdeprecated-declarations\"") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("warning(disable:1478 1786)") +#elif JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED __pragma(warning(disable:1478 1786)) +#elif JSON_HEDLEY_PGI_VERSION_CHECK(20,7,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1215,1216,1444,1445") +#elif JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1215,1444") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,3,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED 
_Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"") +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED __pragma(warning(disable:4996)) +#elif JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1215,1444") +#elif \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress 1291,1718") +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,13,0) && !defined(__cplusplus) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("error_messages(off,E_DEPRECATED_ATT,E_DEPRECATED_ATT_MESS)") +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,13,0) && defined(__cplusplus) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("error_messages(off,symdeprecated,symdeprecated2)") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("diag_suppress=Pe1444,Pe1215") +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(2,90,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED _Pragma("warn(disable:2241)") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS +#endif +#if 
JSON_HEDLEY_HAS_WARNING("-Wunknown-pragmas") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("clang diagnostic ignored \"-Wunknown-pragmas\"") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("warning(disable:161)") +#elif JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS __pragma(warning(disable:161)) +#elif JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 1675") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,3,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("GCC diagnostic ignored \"-Wunknown-pragmas\"") +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS __pragma(warning(disable:4068)) +#elif \ + JSON_HEDLEY_TI_VERSION_CHECK(16,9,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,0,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,3,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 163") +#elif JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 163") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress=Pe161") +#elif JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS _Pragma("diag_suppress 161") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunknown-attributes") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("clang diagnostic ignored \"-Wunknown-attributes\"") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,6,0) + #define 
JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(17,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("warning(disable:1292)") +#elif JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES __pragma(warning(disable:1292)) +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(19,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES __pragma(warning(disable:5030)) +#elif JSON_HEDLEY_PGI_VERSION_CHECK(20,7,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1097,1098") +#elif JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1097") +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,14,0) && defined(__cplusplus) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("error_messages(off,attrskipunsup)") +#elif \ + JSON_HEDLEY_TI_VERSION_CHECK(18,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,3,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1173") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress=Pe1097") +#elif JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1097") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wcast-qual") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL _Pragma("clang diagnostic ignored \"-Wcast-qual\"") +#elif JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL 
_Pragma("warning(disable:2203 2331)") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(3,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL _Pragma("GCC diagnostic ignored \"-Wcast-qual\"") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL +#endif + +#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION) + #undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunused-function") + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION _Pragma("clang diagnostic ignored \"-Wunused-function\"") +#elif JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION _Pragma("GCC diagnostic ignored \"-Wunused-function\"") +#elif JSON_HEDLEY_MSVC_VERSION_CHECK(1,0,0) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION __pragma(warning(disable:4505)) +#elif JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION _Pragma("diag_suppress 3142") +#else + #define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNUSED_FUNCTION +#endif + +#if defined(JSON_HEDLEY_DEPRECATED) + #undef JSON_HEDLEY_DEPRECATED +#endif +#if defined(JSON_HEDLEY_DEPRECATED_FOR) + #undef JSON_HEDLEY_DEPRECATED_FOR +#endif +#if \ + JSON_HEDLEY_MSVC_VERSION_CHECK(14,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DEPRECATED(since) __declspec(deprecated("Since " # since)) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __declspec(deprecated("Since " #since "; use " #replacement)) +#elif \ + (JSON_HEDLEY_HAS_EXTENSION(attribute_deprecated_with_message) && !defined(JSON_HEDLEY_IAR_VERSION)) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,5,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,6,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,13,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(18,1,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(18,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,3,0) || \ + 
JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,3,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_DEPRECATED(since) __attribute__((__deprecated__("Since " #since))) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __attribute__((__deprecated__("Since " #since "; use " #replacement))) +#elif defined(__cplusplus) && (__cplusplus >= 201402L) + #define JSON_HEDLEY_DEPRECATED(since) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[deprecated("Since " #since)]]) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[deprecated("Since " #since "; use " #replacement)]]) +#elif \ + JSON_HEDLEY_HAS_ATTRIBUTE(deprecated) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,10,0) + #define JSON_HEDLEY_DEPRECATED(since) __attribute__((__deprecated__)) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __attribute__((__deprecated__)) +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \ + JSON_HEDLEY_PELLES_VERSION_CHECK(6,50,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_DEPRECATED(since) 
__declspec(deprecated) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __declspec(deprecated) +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_DEPRECATED(since) _Pragma("deprecated") + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) _Pragma("deprecated") +#else + #define JSON_HEDLEY_DEPRECATED(since) + #define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) +#endif + +#if defined(JSON_HEDLEY_UNAVAILABLE) + #undef JSON_HEDLEY_UNAVAILABLE +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(warning) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,3,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_UNAVAILABLE(available_since) __attribute__((__warning__("Not available until " #available_since))) +#else + #define JSON_HEDLEY_UNAVAILABLE(available_since) +#endif + +#if defined(JSON_HEDLEY_WARN_UNUSED_RESULT) + #undef JSON_HEDLEY_WARN_UNUSED_RESULT +#endif +#if defined(JSON_HEDLEY_WARN_UNUSED_RESULT_MSG) + #undef JSON_HEDLEY_WARN_UNUSED_RESULT_MSG +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(warn_unused_result) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0) && defined(__cplusplus)) || \ + 
JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_WARN_UNUSED_RESULT __attribute__((__warn_unused_result__)) + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) __attribute__((__warn_unused_result__)) +#elif (JSON_HEDLEY_HAS_CPP_ATTRIBUTE(nodiscard) >= 201907L) + #define JSON_HEDLEY_WARN_UNUSED_RESULT JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard]]) + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard(msg)]]) +#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE(nodiscard) + #define JSON_HEDLEY_WARN_UNUSED_RESULT JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard]]) + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard]]) +#elif defined(_Check_return_) /* SAL */ + #define JSON_HEDLEY_WARN_UNUSED_RESULT _Check_return_ + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) _Check_return_ +#else + #define JSON_HEDLEY_WARN_UNUSED_RESULT + #define JSON_HEDLEY_WARN_UNUSED_RESULT_MSG(msg) +#endif + +#if defined(JSON_HEDLEY_SENTINEL) + #undef JSON_HEDLEY_SENTINEL +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(sentinel) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,4,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_SENTINEL(position) __attribute__((__sentinel__(position))) +#else + #define JSON_HEDLEY_SENTINEL(position) +#endif + +#if defined(JSON_HEDLEY_NO_RETURN) + #undef JSON_HEDLEY_NO_RETURN +#endif +#if JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_NO_RETURN __noreturn +#elif \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_NO_RETURN __attribute__((__noreturn__)) +#elif defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L + #define JSON_HEDLEY_NO_RETURN _Noreturn +#elif defined(__cplusplus) && 
(__cplusplus >= 201103L) + #define JSON_HEDLEY_NO_RETURN JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[noreturn]]) +#elif \ + JSON_HEDLEY_HAS_ATTRIBUTE(noreturn) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,2,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,10,0) + #define JSON_HEDLEY_NO_RETURN __attribute__((__noreturn__)) +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) + #define JSON_HEDLEY_NO_RETURN _Pragma("does_not_return") +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_NO_RETURN __declspec(noreturn) +#elif JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,0,0) && defined(__cplusplus) + #define JSON_HEDLEY_NO_RETURN _Pragma("FUNC_NEVER_RETURNS;") +#elif JSON_HEDLEY_COMPCERT_VERSION_CHECK(3,2,0) + #define JSON_HEDLEY_NO_RETURN __attribute((noreturn)) +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(9,0,0) + #define JSON_HEDLEY_NO_RETURN __declspec(noreturn) +#else + #define JSON_HEDLEY_NO_RETURN +#endif + +#if defined(JSON_HEDLEY_NO_ESCAPE) + #undef JSON_HEDLEY_NO_ESCAPE +#endif +#if JSON_HEDLEY_HAS_ATTRIBUTE(noescape) + #define JSON_HEDLEY_NO_ESCAPE __attribute__((__noescape__)) +#else 
+ #define JSON_HEDLEY_NO_ESCAPE +#endif + +#if defined(JSON_HEDLEY_UNREACHABLE) + #undef JSON_HEDLEY_UNREACHABLE +#endif +#if defined(JSON_HEDLEY_UNREACHABLE_RETURN) + #undef JSON_HEDLEY_UNREACHABLE_RETURN +#endif +#if defined(JSON_HEDLEY_ASSUME) + #undef JSON_HEDLEY_ASSUME +#endif +#if \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_ASSUME(expr) __assume(expr) +#elif JSON_HEDLEY_HAS_BUILTIN(__builtin_assume) + #define JSON_HEDLEY_ASSUME(expr) __builtin_assume(expr) +#elif \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(4,0,0) + #if defined(__cplusplus) + #define JSON_HEDLEY_ASSUME(expr) std::_nassert(expr) + #else + #define JSON_HEDLEY_ASSUME(expr) _nassert(expr) + #endif +#endif +#if \ + (JSON_HEDLEY_HAS_BUILTIN(__builtin_unreachable) && (!defined(JSON_HEDLEY_ARM_VERSION))) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,5,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(18,10,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(13,1,5) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(10,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_UNREACHABLE() __builtin_unreachable() +#elif defined(JSON_HEDLEY_ASSUME) + #define JSON_HEDLEY_UNREACHABLE() JSON_HEDLEY_ASSUME(0) +#endif +#if !defined(JSON_HEDLEY_ASSUME) + #if defined(JSON_HEDLEY_UNREACHABLE) + #define JSON_HEDLEY_ASSUME(expr) JSON_HEDLEY_STATIC_CAST(void, ((expr) ? 
1 : (JSON_HEDLEY_UNREACHABLE(), 1))) + #else + #define JSON_HEDLEY_ASSUME(expr) JSON_HEDLEY_STATIC_CAST(void, expr) + #endif +#endif +#if defined(JSON_HEDLEY_UNREACHABLE) + #if \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(4,0,0) + #define JSON_HEDLEY_UNREACHABLE_RETURN(value) return (JSON_HEDLEY_STATIC_CAST(void, JSON_HEDLEY_ASSUME(0)), (value)) + #else + #define JSON_HEDLEY_UNREACHABLE_RETURN(value) JSON_HEDLEY_UNREACHABLE() + #endif +#else + #define JSON_HEDLEY_UNREACHABLE_RETURN(value) return (value) +#endif +#if !defined(JSON_HEDLEY_UNREACHABLE) + #define JSON_HEDLEY_UNREACHABLE() JSON_HEDLEY_ASSUME(0) +#endif + +JSON_HEDLEY_DIAGNOSTIC_PUSH +#if JSON_HEDLEY_HAS_WARNING("-Wpedantic") + #pragma clang diagnostic ignored "-Wpedantic" +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wc++98-compat-pedantic") && defined(__cplusplus) + #pragma clang diagnostic ignored "-Wc++98-compat-pedantic" +#endif +#if JSON_HEDLEY_GCC_HAS_WARNING("-Wvariadic-macros",4,0,0) + #if defined(__clang__) + #pragma clang diagnostic ignored "-Wvariadic-macros" + #elif defined(JSON_HEDLEY_GCC_VERSION) + #pragma GCC diagnostic ignored "-Wvariadic-macros" + #endif +#endif +#if defined(JSON_HEDLEY_NON_NULL) + #undef JSON_HEDLEY_NON_NULL +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(nonnull) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,3,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) + #define JSON_HEDLEY_NON_NULL(...) __attribute__((__nonnull__(__VA_ARGS__))) +#else + #define JSON_HEDLEY_NON_NULL(...) 
+#endif +JSON_HEDLEY_DIAGNOSTIC_POP + +#if defined(JSON_HEDLEY_PRINTF_FORMAT) + #undef JSON_HEDLEY_PRINTF_FORMAT +#endif +#if defined(__MINGW32__) && JSON_HEDLEY_GCC_HAS_ATTRIBUTE(format,4,4,0) && !defined(__USE_MINGW_ANSI_STDIO) + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __attribute__((__format__(ms_printf, string_idx, first_to_check))) +#elif defined(__MINGW32__) && JSON_HEDLEY_GCC_HAS_ATTRIBUTE(format,4,4,0) && defined(__USE_MINGW_ANSI_STDIO) + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __attribute__((__format__(gnu_printf, string_idx, first_to_check))) +#elif \ + JSON_HEDLEY_HAS_ATTRIBUTE(format) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,6,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __attribute__((__format__(__printf__, string_idx, first_to_check))) +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(6,0,0) + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) __declspec(vaformat(printf,string_idx,first_to_check)) +#else + #define JSON_HEDLEY_PRINTF_FORMAT(string_idx,first_to_check) +#endif + +#if defined(JSON_HEDLEY_CONSTEXPR) + 
#undef JSON_HEDLEY_CONSTEXPR +#endif +#if defined(__cplusplus) + #if __cplusplus >= 201103L + #define JSON_HEDLEY_CONSTEXPR JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(constexpr) + #endif +#endif +#if !defined(JSON_HEDLEY_CONSTEXPR) + #define JSON_HEDLEY_CONSTEXPR +#endif + +#if defined(JSON_HEDLEY_PREDICT) + #undef JSON_HEDLEY_PREDICT +#endif +#if defined(JSON_HEDLEY_LIKELY) + #undef JSON_HEDLEY_LIKELY +#endif +#if defined(JSON_HEDLEY_UNLIKELY) + #undef JSON_HEDLEY_UNLIKELY +#endif +#if defined(JSON_HEDLEY_UNPREDICTABLE) + #undef JSON_HEDLEY_UNPREDICTABLE +#endif +#if JSON_HEDLEY_HAS_BUILTIN(__builtin_unpredictable) + #define JSON_HEDLEY_UNPREDICTABLE(expr) __builtin_unpredictable((expr)) +#endif +#if \ + (JSON_HEDLEY_HAS_BUILTIN(__builtin_expect_with_probability) && !defined(JSON_HEDLEY_PGI_VERSION)) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(9,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) +# define JSON_HEDLEY_PREDICT(expr, value, probability) __builtin_expect_with_probability( (expr), (value), (probability)) +# define JSON_HEDLEY_PREDICT_TRUE(expr, probability) __builtin_expect_with_probability(!!(expr), 1 , (probability)) +# define JSON_HEDLEY_PREDICT_FALSE(expr, probability) __builtin_expect_with_probability(!!(expr), 0 , (probability)) +# define JSON_HEDLEY_LIKELY(expr) __builtin_expect (!!(expr), 1 ) +# define JSON_HEDLEY_UNLIKELY(expr) __builtin_expect (!!(expr), 0 ) +#elif \ + (JSON_HEDLEY_HAS_BUILTIN(__builtin_expect) && !defined(JSON_HEDLEY_INTEL_CL_VERSION)) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0) && defined(__cplusplus)) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,7,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,1,0) || \ + 
JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,1,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,27) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) +# define JSON_HEDLEY_PREDICT(expr, expected, probability) \ + (((probability) >= 0.9) ? __builtin_expect((expr), (expected)) : (JSON_HEDLEY_STATIC_CAST(void, expected), (expr))) +# define JSON_HEDLEY_PREDICT_TRUE(expr, probability) \ + (__extension__ ({ \ + double hedley_probability_ = (probability); \ + ((hedley_probability_ >= 0.9) ? __builtin_expect(!!(expr), 1) : ((hedley_probability_ <= 0.1) ? __builtin_expect(!!(expr), 0) : !!(expr))); \ + })) +# define JSON_HEDLEY_PREDICT_FALSE(expr, probability) \ + (__extension__ ({ \ + double hedley_probability_ = (probability); \ + ((hedley_probability_ >= 0.9) ? __builtin_expect(!!(expr), 0) : ((hedley_probability_ <= 0.1) ? __builtin_expect(!!(expr), 1) : !!(expr))); \ + })) +# define JSON_HEDLEY_LIKELY(expr) __builtin_expect(!!(expr), 1) +# define JSON_HEDLEY_UNLIKELY(expr) __builtin_expect(!!(expr), 0) +#else +# define JSON_HEDLEY_PREDICT(expr, expected, probability) (JSON_HEDLEY_STATIC_CAST(void, expected), (expr)) +# define JSON_HEDLEY_PREDICT_TRUE(expr, probability) (!!(expr)) +# define JSON_HEDLEY_PREDICT_FALSE(expr, probability) (!!(expr)) +# define JSON_HEDLEY_LIKELY(expr) (!!(expr)) +# define JSON_HEDLEY_UNLIKELY(expr) (!!(expr)) +#endif +#if !defined(JSON_HEDLEY_UNPREDICTABLE) + #define JSON_HEDLEY_UNPREDICTABLE(expr) JSON_HEDLEY_PREDICT(expr, 1, 0.5) +#endif + +#if defined(JSON_HEDLEY_MALLOC) + #undef JSON_HEDLEY_MALLOC +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(malloc) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(12,1,0) || \ + 
JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_MALLOC __attribute__((__malloc__)) +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) + #define JSON_HEDLEY_MALLOC _Pragma("returns_new_memory") +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(14,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_MALLOC __declspec(restrict) +#else + #define JSON_HEDLEY_MALLOC +#endif + +#if defined(JSON_HEDLEY_PURE) + #undef JSON_HEDLEY_PURE +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(pure) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(2,96,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && 
defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) +# define JSON_HEDLEY_PURE __attribute__((__pure__)) +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) +# define JSON_HEDLEY_PURE _Pragma("does_not_write_global_data") +#elif defined(__cplusplus) && \ + ( \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(2,0,1) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(4,0,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) \ + ) +# define JSON_HEDLEY_PURE _Pragma("FUNC_IS_PURE;") +#else +# define JSON_HEDLEY_PURE +#endif + +#if defined(JSON_HEDLEY_CONST) + #undef JSON_HEDLEY_CONST +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(const) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(2,5,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_CONST __attribute__((__const__)) +#elif \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) + #define 
JSON_HEDLEY_CONST _Pragma("no_side_effect") +#else + #define JSON_HEDLEY_CONST JSON_HEDLEY_PURE +#endif + +#if defined(JSON_HEDLEY_RESTRICT) + #undef JSON_HEDLEY_RESTRICT +#endif +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) && !defined(__cplusplus) + #define JSON_HEDLEY_RESTRICT restrict +#elif \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_MSVC_VERSION_CHECK(14,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,4) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,14,0) && defined(__cplusplus)) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) || \ + defined(__clang__) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_RESTRICT __restrict +#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,3,0) && !defined(__cplusplus) + #define JSON_HEDLEY_RESTRICT _Restrict +#else + #define JSON_HEDLEY_RESTRICT +#endif + +#if defined(JSON_HEDLEY_INLINE) + #undef JSON_HEDLEY_INLINE +#endif +#if \ + (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) || \ + (defined(__cplusplus) && (__cplusplus >= 199711L)) + #define JSON_HEDLEY_INLINE inline +#elif \ + defined(JSON_HEDLEY_GCC_VERSION) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(6,2,0) + #define JSON_HEDLEY_INLINE __inline__ +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(12,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,1,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(3,1,0) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,2,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(8,0,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + 
JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_INLINE __inline +#else + #define JSON_HEDLEY_INLINE +#endif + +#if defined(JSON_HEDLEY_ALWAYS_INLINE) + #undef JSON_HEDLEY_ALWAYS_INLINE +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(always_inline) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,10,0) +# define JSON_HEDLEY_ALWAYS_INLINE __attribute__((__always_inline__)) JSON_HEDLEY_INLINE +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(12,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) +# define JSON_HEDLEY_ALWAYS_INLINE __forceinline +#elif defined(__cplusplus) && \ + ( \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,1,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) \ + ) +# define JSON_HEDLEY_ALWAYS_INLINE _Pragma("FUNC_ALWAYS_INLINE;") +#elif 
JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) +# define JSON_HEDLEY_ALWAYS_INLINE _Pragma("inline=forced") +#else +# define JSON_HEDLEY_ALWAYS_INLINE JSON_HEDLEY_INLINE +#endif + +#if defined(JSON_HEDLEY_NEVER_INLINE) + #undef JSON_HEDLEY_NEVER_INLINE +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(noinline) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(10,1,0) || \ + JSON_HEDLEY_TI_VERSION_CHECK(15,12,0) || \ + (JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(4,8,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_ARMCL_VERSION_CHECK(5,2,0) || \ + (JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL2000_VERSION_CHECK(6,4,0) || \ + (JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,0,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(4,3,0) || \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) || \ + JSON_HEDLEY_TI_CL7X_VERSION_CHECK(1,2,0) || \ + JSON_HEDLEY_TI_CLPRU_VERSION_CHECK(2,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) || \ + JSON_HEDLEY_IAR_VERSION_CHECK(8,10,0) + #define JSON_HEDLEY_NEVER_INLINE __attribute__((__noinline__)) +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_NEVER_INLINE __declspec(noinline) +#elif JSON_HEDLEY_PGI_VERSION_CHECK(10,2,0) + #define JSON_HEDLEY_NEVER_INLINE _Pragma("noinline") +#elif JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,0,0) && defined(__cplusplus) + #define JSON_HEDLEY_NEVER_INLINE _Pragma("FUNC_CANNOT_INLINE;") +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) + #define JSON_HEDLEY_NEVER_INLINE _Pragma("inline=never") +#elif JSON_HEDLEY_COMPCERT_VERSION_CHECK(3,2,0) + #define JSON_HEDLEY_NEVER_INLINE __attribute((noinline)) 
+#elif JSON_HEDLEY_PELLES_VERSION_CHECK(9,0,0) + #define JSON_HEDLEY_NEVER_INLINE __declspec(noinline) +#else + #define JSON_HEDLEY_NEVER_INLINE +#endif + +#if defined(JSON_HEDLEY_PRIVATE) + #undef JSON_HEDLEY_PRIVATE +#endif +#if defined(JSON_HEDLEY_PUBLIC) + #undef JSON_HEDLEY_PUBLIC +#endif +#if defined(JSON_HEDLEY_IMPORT) + #undef JSON_HEDLEY_IMPORT +#endif +#if defined(_WIN32) || defined(__CYGWIN__) +# define JSON_HEDLEY_PRIVATE +# define JSON_HEDLEY_PUBLIC __declspec(dllexport) +# define JSON_HEDLEY_IMPORT __declspec(dllimport) +#else +# if \ + JSON_HEDLEY_HAS_ATTRIBUTE(visibility) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,3,0) || \ + JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,11,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(13,1,0) || \ + ( \ + defined(__TI_EABI__) && \ + ( \ + (JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,2,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(7,5,0) \ + ) \ + ) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) +# define JSON_HEDLEY_PRIVATE __attribute__((__visibility__("hidden"))) +# define JSON_HEDLEY_PUBLIC __attribute__((__visibility__("default"))) +# else +# define JSON_HEDLEY_PRIVATE +# define JSON_HEDLEY_PUBLIC +# endif +# define JSON_HEDLEY_IMPORT extern +#endif + +#if defined(JSON_HEDLEY_NO_THROW) + #undef JSON_HEDLEY_NO_THROW +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(nothrow) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,3,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_NO_THROW __attribute__((__nothrow__)) +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(13,1,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) + #define JSON_HEDLEY_NO_THROW __declspec(nothrow) +#else + #define JSON_HEDLEY_NO_THROW +#endif + +#if defined(JSON_HEDLEY_FALL_THROUGH) + #undef JSON_HEDLEY_FALL_THROUGH +#endif +#if \ + 
JSON_HEDLEY_HAS_ATTRIBUTE(fallthrough) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(7,0,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_FALL_THROUGH __attribute__((__fallthrough__)) +#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(clang,fallthrough) + #define JSON_HEDLEY_FALL_THROUGH JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[clang::fallthrough]]) +#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE(fallthrough) + #define JSON_HEDLEY_FALL_THROUGH JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[fallthrough]]) +#elif defined(__fallthrough) /* SAL */ + #define JSON_HEDLEY_FALL_THROUGH __fallthrough +#else + #define JSON_HEDLEY_FALL_THROUGH +#endif + +#if defined(JSON_HEDLEY_RETURNS_NON_NULL) + #undef JSON_HEDLEY_RETURNS_NON_NULL +#endif +#if \ + JSON_HEDLEY_HAS_ATTRIBUTE(returns_nonnull) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,9,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_RETURNS_NON_NULL __attribute__((__returns_nonnull__)) +#elif defined(_Ret_notnull_) /* SAL */ + #define JSON_HEDLEY_RETURNS_NON_NULL _Ret_notnull_ +#else + #define JSON_HEDLEY_RETURNS_NON_NULL +#endif + +#if defined(JSON_HEDLEY_ARRAY_PARAM) + #undef JSON_HEDLEY_ARRAY_PARAM +#endif +#if \ + defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) && \ + !defined(__STDC_NO_VLA__) && \ + !defined(__cplusplus) && \ + !defined(JSON_HEDLEY_PGI_VERSION) && \ + !defined(JSON_HEDLEY_TINYC_VERSION) + #define JSON_HEDLEY_ARRAY_PARAM(name) (name) +#else + #define JSON_HEDLEY_ARRAY_PARAM(name) +#endif + +#if defined(JSON_HEDLEY_IS_CONSTANT) + #undef JSON_HEDLEY_IS_CONSTANT +#endif +#if defined(JSON_HEDLEY_REQUIRE_CONSTEXPR) + #undef JSON_HEDLEY_REQUIRE_CONSTEXPR +#endif +/* JSON_HEDLEY_IS_CONSTEXPR_ is for + HEDLEY INTERNAL USE ONLY. API subject to change without notice. 
*/ +#if defined(JSON_HEDLEY_IS_CONSTEXPR_) + #undef JSON_HEDLEY_IS_CONSTEXPR_ +#endif +#if \ + JSON_HEDLEY_HAS_BUILTIN(__builtin_constant_p) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,19) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(13,1,0) || \ + JSON_HEDLEY_TI_CL6X_VERSION_CHECK(6,1,0) || \ + (JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) && !defined(__cplusplus)) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_MCST_LCC_VERSION_CHECK(1,25,10) + #define JSON_HEDLEY_IS_CONSTANT(expr) __builtin_constant_p(expr) +#endif +#if !defined(__cplusplus) +# if \ + JSON_HEDLEY_HAS_BUILTIN(__builtin_types_compatible_p) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(13,1,0) || \ + JSON_HEDLEY_CRAY_VERSION_CHECK(8,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,4,0) || \ + JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,24) +#if defined(__INTPTR_TYPE__) + #define JSON_HEDLEY_IS_CONSTEXPR_(expr) __builtin_types_compatible_p(__typeof__((1 ? (void*) ((__INTPTR_TYPE__) ((expr) * 0)) : (int*) 0)), int*) +#else + #include + #define JSON_HEDLEY_IS_CONSTEXPR_(expr) __builtin_types_compatible_p(__typeof__((1 ? (void*) ((intptr_t) ((expr) * 0)) : (int*) 0)), int*) +#endif +# elif \ + ( \ + defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L) && \ + !defined(JSON_HEDLEY_SUNPRO_VERSION) && \ + !defined(JSON_HEDLEY_PGI_VERSION) && \ + !defined(JSON_HEDLEY_IAR_VERSION)) || \ + (JSON_HEDLEY_HAS_EXTENSION(c_generic_selections) && !defined(JSON_HEDLEY_IAR_VERSION)) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,9,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(17,0,0) || \ + JSON_HEDLEY_IBM_VERSION_CHECK(12,1,0) || \ + JSON_HEDLEY_ARM_VERSION_CHECK(5,3,0) +#if defined(__INTPTR_TYPE__) + #define JSON_HEDLEY_IS_CONSTEXPR_(expr) _Generic((1 ? 
(void*) ((__INTPTR_TYPE__) ((expr) * 0)) : (int*) 0), int*: 1, void*: 0) +#else + #include + #define JSON_HEDLEY_IS_CONSTEXPR_(expr) _Generic((1 ? (void*) ((intptr_t) * 0) : (int*) 0), int*: 1, void*: 0) +#endif +# elif \ + defined(JSON_HEDLEY_GCC_VERSION) || \ + defined(JSON_HEDLEY_INTEL_VERSION) || \ + defined(JSON_HEDLEY_TINYC_VERSION) || \ + defined(JSON_HEDLEY_TI_ARMCL_VERSION) || \ + JSON_HEDLEY_TI_CL430_VERSION_CHECK(18,12,0) || \ + defined(JSON_HEDLEY_TI_CL2000_VERSION) || \ + defined(JSON_HEDLEY_TI_CL6X_VERSION) || \ + defined(JSON_HEDLEY_TI_CL7X_VERSION) || \ + defined(JSON_HEDLEY_TI_CLPRU_VERSION) || \ + defined(__clang__) +# define JSON_HEDLEY_IS_CONSTEXPR_(expr) ( \ + sizeof(void) != \ + sizeof(*( \ + 1 ? \ + ((void*) ((expr) * 0L) ) : \ +((struct { char v[sizeof(void) * 2]; } *) 1) \ + ) \ + ) \ + ) +# endif +#endif +#if defined(JSON_HEDLEY_IS_CONSTEXPR_) + #if !defined(JSON_HEDLEY_IS_CONSTANT) + #define JSON_HEDLEY_IS_CONSTANT(expr) JSON_HEDLEY_IS_CONSTEXPR_(expr) + #endif + #define JSON_HEDLEY_REQUIRE_CONSTEXPR(expr) (JSON_HEDLEY_IS_CONSTEXPR_(expr) ? 
(expr) : (-1)) +#else + #if !defined(JSON_HEDLEY_IS_CONSTANT) + #define JSON_HEDLEY_IS_CONSTANT(expr) (0) + #endif + #define JSON_HEDLEY_REQUIRE_CONSTEXPR(expr) (expr) +#endif + +#if defined(JSON_HEDLEY_BEGIN_C_DECLS) + #undef JSON_HEDLEY_BEGIN_C_DECLS +#endif +#if defined(JSON_HEDLEY_END_C_DECLS) + #undef JSON_HEDLEY_END_C_DECLS +#endif +#if defined(JSON_HEDLEY_C_DECL) + #undef JSON_HEDLEY_C_DECL +#endif +#if defined(__cplusplus) + #define JSON_HEDLEY_BEGIN_C_DECLS extern "C" { + #define JSON_HEDLEY_END_C_DECLS } + #define JSON_HEDLEY_C_DECL extern "C" +#else + #define JSON_HEDLEY_BEGIN_C_DECLS + #define JSON_HEDLEY_END_C_DECLS + #define JSON_HEDLEY_C_DECL +#endif + +#if defined(JSON_HEDLEY_STATIC_ASSERT) + #undef JSON_HEDLEY_STATIC_ASSERT +#endif +#if \ + !defined(__cplusplus) && ( \ + (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)) || \ + (JSON_HEDLEY_HAS_FEATURE(c_static_assert) && !defined(JSON_HEDLEY_INTEL_CL_VERSION)) || \ + JSON_HEDLEY_GCC_VERSION_CHECK(6,0,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) || \ + defined(_Static_assert) \ + ) +# define JSON_HEDLEY_STATIC_ASSERT(expr, message) _Static_assert(expr, message) +#elif \ + (defined(__cplusplus) && (__cplusplus >= 201103L)) || \ + JSON_HEDLEY_MSVC_VERSION_CHECK(16,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) +# define JSON_HEDLEY_STATIC_ASSERT(expr, message) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(static_assert(expr, message)) +#else +# define JSON_HEDLEY_STATIC_ASSERT(expr, message) +#endif + +#if defined(JSON_HEDLEY_NULL) + #undef JSON_HEDLEY_NULL +#endif +#if defined(__cplusplus) + #if __cplusplus >= 201103L + #define JSON_HEDLEY_NULL JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(nullptr) + #elif defined(NULL) + #define JSON_HEDLEY_NULL NULL + #else + #define JSON_HEDLEY_NULL JSON_HEDLEY_STATIC_CAST(void*, 0) + #endif +#elif defined(NULL) + #define JSON_HEDLEY_NULL NULL +#else + #define JSON_HEDLEY_NULL ((void*) 0) +#endif + +#if 
defined(JSON_HEDLEY_MESSAGE) + #undef JSON_HEDLEY_MESSAGE +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunknown-pragmas") +# define JSON_HEDLEY_MESSAGE(msg) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS \ + JSON_HEDLEY_PRAGMA(message msg) \ + JSON_HEDLEY_DIAGNOSTIC_POP +#elif \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) +# define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(message msg) +#elif JSON_HEDLEY_CRAY_VERSION_CHECK(5,0,0) +# define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(_CRI message msg) +#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) +# define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(message(msg)) +#elif JSON_HEDLEY_PELLES_VERSION_CHECK(2,0,0) +# define JSON_HEDLEY_MESSAGE(msg) JSON_HEDLEY_PRAGMA(message(msg)) +#else +# define JSON_HEDLEY_MESSAGE(msg) +#endif + +#if defined(JSON_HEDLEY_WARNING) + #undef JSON_HEDLEY_WARNING +#endif +#if JSON_HEDLEY_HAS_WARNING("-Wunknown-pragmas") +# define JSON_HEDLEY_WARNING(msg) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS \ + JSON_HEDLEY_PRAGMA(clang warning msg) \ + JSON_HEDLEY_DIAGNOSTIC_POP +#elif \ + JSON_HEDLEY_GCC_VERSION_CHECK(4,8,0) || \ + JSON_HEDLEY_PGI_VERSION_CHECK(18,4,0) || \ + JSON_HEDLEY_INTEL_VERSION_CHECK(13,0,0) +# define JSON_HEDLEY_WARNING(msg) JSON_HEDLEY_PRAGMA(GCC warning msg) +#elif \ + JSON_HEDLEY_MSVC_VERSION_CHECK(15,0,0) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) +# define JSON_HEDLEY_WARNING(msg) JSON_HEDLEY_PRAGMA(message(msg)) +#else +# define JSON_HEDLEY_WARNING(msg) JSON_HEDLEY_MESSAGE(msg) +#endif + +#if defined(JSON_HEDLEY_REQUIRE) + #undef JSON_HEDLEY_REQUIRE +#endif +#if defined(JSON_HEDLEY_REQUIRE_MSG) + #undef JSON_HEDLEY_REQUIRE_MSG +#endif +#if JSON_HEDLEY_HAS_ATTRIBUTE(diagnose_if) +# if JSON_HEDLEY_HAS_WARNING("-Wgcc-compat") +# define JSON_HEDLEY_REQUIRE(expr) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wgcc-compat\"") \ + 
__attribute__((diagnose_if(!(expr), #expr, "error"))) \ + JSON_HEDLEY_DIAGNOSTIC_POP +# define JSON_HEDLEY_REQUIRE_MSG(expr,msg) \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("clang diagnostic ignored \"-Wgcc-compat\"") \ + __attribute__((diagnose_if(!(expr), msg, "error"))) \ + JSON_HEDLEY_DIAGNOSTIC_POP +# else +# define JSON_HEDLEY_REQUIRE(expr) __attribute__((diagnose_if(!(expr), #expr, "error"))) +# define JSON_HEDLEY_REQUIRE_MSG(expr,msg) __attribute__((diagnose_if(!(expr), msg, "error"))) +# endif +#else +# define JSON_HEDLEY_REQUIRE(expr) +# define JSON_HEDLEY_REQUIRE_MSG(expr,msg) +#endif + +#if defined(JSON_HEDLEY_FLAGS) + #undef JSON_HEDLEY_FLAGS +#endif +#if JSON_HEDLEY_HAS_ATTRIBUTE(flag_enum) && (!defined(__cplusplus) || JSON_HEDLEY_HAS_WARNING("-Wbitfield-enum-conversion")) + #define JSON_HEDLEY_FLAGS __attribute__((__flag_enum__)) +#else + #define JSON_HEDLEY_FLAGS +#endif + +#if defined(JSON_HEDLEY_FLAGS_CAST) + #undef JSON_HEDLEY_FLAGS_CAST +#endif +#if JSON_HEDLEY_INTEL_VERSION_CHECK(19,0,0) +# define JSON_HEDLEY_FLAGS_CAST(T, expr) (__extension__ ({ \ + JSON_HEDLEY_DIAGNOSTIC_PUSH \ + _Pragma("warning(disable:188)") \ + ((T) (expr)); \ + JSON_HEDLEY_DIAGNOSTIC_POP \ + })) +#else +# define JSON_HEDLEY_FLAGS_CAST(T, expr) JSON_HEDLEY_STATIC_CAST(T, expr) +#endif + +#if defined(JSON_HEDLEY_EMPTY_BASES) + #undef JSON_HEDLEY_EMPTY_BASES +#endif +#if \ + (JSON_HEDLEY_MSVC_VERSION_CHECK(19,0,23918) && !JSON_HEDLEY_MSVC_VERSION_CHECK(20,0,0)) || \ + JSON_HEDLEY_INTEL_CL_VERSION_CHECK(2021,1,0) + #define JSON_HEDLEY_EMPTY_BASES __declspec(empty_bases) +#else + #define JSON_HEDLEY_EMPTY_BASES +#endif + +/* Remaining macros are deprecated. 
*/ + +#if defined(JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK) + #undef JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK +#endif +#if defined(__clang__) + #define JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK(major,minor,patch) (0) +#else + #define JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK(major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) +#endif + +#if defined(JSON_HEDLEY_CLANG_HAS_ATTRIBUTE) + #undef JSON_HEDLEY_CLANG_HAS_ATTRIBUTE +#endif +#define JSON_HEDLEY_CLANG_HAS_ATTRIBUTE(attribute) JSON_HEDLEY_HAS_ATTRIBUTE(attribute) + +#if defined(JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE) + #undef JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE +#endif +#define JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE(attribute) JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute) + +#if defined(JSON_HEDLEY_CLANG_HAS_BUILTIN) + #undef JSON_HEDLEY_CLANG_HAS_BUILTIN +#endif +#define JSON_HEDLEY_CLANG_HAS_BUILTIN(builtin) JSON_HEDLEY_HAS_BUILTIN(builtin) + +#if defined(JSON_HEDLEY_CLANG_HAS_FEATURE) + #undef JSON_HEDLEY_CLANG_HAS_FEATURE +#endif +#define JSON_HEDLEY_CLANG_HAS_FEATURE(feature) JSON_HEDLEY_HAS_FEATURE(feature) + +#if defined(JSON_HEDLEY_CLANG_HAS_EXTENSION) + #undef JSON_HEDLEY_CLANG_HAS_EXTENSION +#endif +#define JSON_HEDLEY_CLANG_HAS_EXTENSION(extension) JSON_HEDLEY_HAS_EXTENSION(extension) + +#if defined(JSON_HEDLEY_CLANG_HAS_DECLSPEC_DECLSPEC_ATTRIBUTE) + #undef JSON_HEDLEY_CLANG_HAS_DECLSPEC_DECLSPEC_ATTRIBUTE +#endif +#define JSON_HEDLEY_CLANG_HAS_DECLSPEC_ATTRIBUTE(attribute) JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE(attribute) + +#if defined(JSON_HEDLEY_CLANG_HAS_WARNING) + #undef JSON_HEDLEY_CLANG_HAS_WARNING +#endif +#define JSON_HEDLEY_CLANG_HAS_WARNING(warning) JSON_HEDLEY_HAS_WARNING(warning) + +#endif /* !defined(JSON_HEDLEY_VERSION) || (JSON_HEDLEY_VERSION < X) */ + + +// This file contains all internal macro definitions (except those affecting ABI) +// You MUST include macro_unscope.hpp at the end of json.hpp to undef all of them + +// #include + + +// exclude unsupported compilers +#if 
!defined(JSON_SKIP_UNSUPPORTED_COMPILER_CHECK) + #if defined(__clang__) + #if (__clang_major__ * 10000 + __clang_minor__ * 100 + __clang_patchlevel__) < 30400 + #error "unsupported Clang version - see https://github.com/nlohmann/json#supported-compilers" + #endif + #elif defined(__GNUC__) && !(defined(__ICC) || defined(__INTEL_COMPILER)) + #if (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) < 40800 + #error "unsupported GCC version - see https://github.com/nlohmann/json#supported-compilers" + #endif + #endif +#endif + +// C++ language standard detection +// if the user manually specified the used c++ version this is skipped +#if !defined(JSON_HAS_CPP_20) && !defined(JSON_HAS_CPP_17) && !defined(JSON_HAS_CPP_14) && !defined(JSON_HAS_CPP_11) + #if (defined(__cplusplus) && __cplusplus >= 202002L) || (defined(_MSVC_LANG) && _MSVC_LANG >= 202002L) + #define JSON_HAS_CPP_20 + #define JSON_HAS_CPP_17 + #define JSON_HAS_CPP_14 + #elif (defined(__cplusplus) && __cplusplus >= 201703L) || (defined(_HAS_CXX17) && _HAS_CXX17 == 1) // fix for issue #464 + #define JSON_HAS_CPP_17 + #define JSON_HAS_CPP_14 + #elif (defined(__cplusplus) && __cplusplus >= 201402L) || (defined(_HAS_CXX14) && _HAS_CXX14 == 1) + #define JSON_HAS_CPP_14 + #endif + // the cpp 11 flag is always specified because it is the minimal required version + #define JSON_HAS_CPP_11 +#endif + +#ifdef __has_include + #if __has_include() + #include + #endif +#endif + +#if !defined(JSON_HAS_FILESYSTEM) && !defined(JSON_HAS_EXPERIMENTAL_FILESYSTEM) + #ifdef JSON_HAS_CPP_17 + #if defined(__cpp_lib_filesystem) + #define JSON_HAS_FILESYSTEM 1 + #elif defined(__cpp_lib_experimental_filesystem) + #define JSON_HAS_EXPERIMENTAL_FILESYSTEM 1 + #elif !defined(__has_include) + #define JSON_HAS_EXPERIMENTAL_FILESYSTEM 1 + #elif __has_include() + #define JSON_HAS_FILESYSTEM 1 + #elif __has_include() + #define JSON_HAS_EXPERIMENTAL_FILESYSTEM 1 + #endif + + // std::filesystem does not work on MinGW GCC 8: 
https://sourceforge.net/p/mingw-w64/bugs/737/ + #if defined(__MINGW32__) && defined(__GNUC__) && __GNUC__ == 8 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + + // no filesystem support before GCC 8: https://en.cppreference.com/w/cpp/compiler_support + #if defined(__GNUC__) && !defined(__clang__) && __GNUC__ < 8 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + + // no filesystem support before Clang 7: https://en.cppreference.com/w/cpp/compiler_support + #if defined(__clang_major__) && __clang_major__ < 7 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + + // no filesystem support before MSVC 19.14: https://en.cppreference.com/w/cpp/compiler_support + #if defined(_MSC_VER) && _MSC_VER < 1914 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + + // no filesystem support before iOS 13 + #if defined(__IPHONE_OS_VERSION_MIN_REQUIRED) && __IPHONE_OS_VERSION_MIN_REQUIRED < 130000 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + + // no filesystem support before macOS Catalina + #if defined(__MAC_OS_X_VERSION_MIN_REQUIRED) && __MAC_OS_X_VERSION_MIN_REQUIRED < 101500 + #undef JSON_HAS_FILESYSTEM + #undef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #endif + #endif +#endif + +#ifndef JSON_HAS_EXPERIMENTAL_FILESYSTEM + #define JSON_HAS_EXPERIMENTAL_FILESYSTEM 0 +#endif + +#ifndef JSON_HAS_FILESYSTEM + #define JSON_HAS_FILESYSTEM 0 +#endif + +#ifndef JSON_HAS_THREE_WAY_COMPARISON + #if defined(__cpp_impl_three_way_comparison) && __cpp_impl_three_way_comparison >= 201907L \ + && defined(__cpp_lib_three_way_comparison) && __cpp_lib_three_way_comparison >= 201907L + #define JSON_HAS_THREE_WAY_COMPARISON 1 + #else + #define JSON_HAS_THREE_WAY_COMPARISON 0 + #endif +#endif + +#ifndef JSON_HAS_RANGES + // ranges header shipping in GCC 11.1.0 (released 2021-04-27) has syntax error + #if defined(__GLIBCXX__) && __GLIBCXX__ == 20210427 
+ #define JSON_HAS_RANGES 0 + #elif defined(__cpp_lib_ranges) + #define JSON_HAS_RANGES 1 + #else + #define JSON_HAS_RANGES 0 + #endif +#endif + +#ifdef JSON_HAS_CPP_17 + #define JSON_INLINE_VARIABLE inline +#else + #define JSON_INLINE_VARIABLE +#endif + +#if JSON_HEDLEY_HAS_ATTRIBUTE(no_unique_address) + #define JSON_NO_UNIQUE_ADDRESS [[no_unique_address]] +#else + #define JSON_NO_UNIQUE_ADDRESS +#endif + +// disable documentation warnings on clang +#if defined(__clang__) + #pragma clang diagnostic push + #pragma clang diagnostic ignored "-Wdocumentation" + #pragma clang diagnostic ignored "-Wdocumentation-unknown-command" +#endif + +// allow disabling exceptions +#if (defined(__cpp_exceptions) || defined(__EXCEPTIONS) || defined(_CPPUNWIND)) && !defined(JSON_NOEXCEPTION) + #define JSON_THROW(exception) throw exception + #define JSON_TRY try + #define JSON_CATCH(exception) catch(exception) + #define JSON_INTERNAL_CATCH(exception) catch(exception) +#else + #include + #define JSON_THROW(exception) std::abort() + #define JSON_TRY if(true) + #define JSON_CATCH(exception) if(false) + #define JSON_INTERNAL_CATCH(exception) if(false) +#endif + +// override exception macros +#if defined(JSON_THROW_USER) + #undef JSON_THROW + #define JSON_THROW JSON_THROW_USER +#endif +#if defined(JSON_TRY_USER) + #undef JSON_TRY + #define JSON_TRY JSON_TRY_USER +#endif +#if defined(JSON_CATCH_USER) + #undef JSON_CATCH + #define JSON_CATCH JSON_CATCH_USER + #undef JSON_INTERNAL_CATCH + #define JSON_INTERNAL_CATCH JSON_CATCH_USER +#endif +#if defined(JSON_INTERNAL_CATCH_USER) + #undef JSON_INTERNAL_CATCH + #define JSON_INTERNAL_CATCH JSON_INTERNAL_CATCH_USER +#endif + +// allow overriding assert +#if !defined(JSON_ASSERT) + #include // assert + #define JSON_ASSERT(x) assert(x) +#endif + +// allow to access some private functions (needed by the test suite) +#if defined(JSON_TESTS_PRIVATE) + #define JSON_PRIVATE_UNLESS_TESTED public +#else + #define JSON_PRIVATE_UNLESS_TESTED private +#endif 
+ +/*! +@brief macro to briefly define a mapping between an enum and JSON +@def NLOHMANN_JSON_SERIALIZE_ENUM +@since version 3.4.0 +*/ +#define NLOHMANN_JSON_SERIALIZE_ENUM(ENUM_TYPE, ...) \ + template \ + inline void to_json(BasicJsonType& j, const ENUM_TYPE& e) \ + { \ + static_assert(std::is_enum::value, #ENUM_TYPE " must be an enum!"); \ + static const std::pair m[] = __VA_ARGS__; \ + auto it = std::find_if(std::begin(m), std::end(m), \ + [e](const std::pair& ej_pair) -> bool \ + { \ + return ej_pair.first == e; \ + }); \ + j = ((it != std::end(m)) ? it : std::begin(m))->second; \ + } \ + template \ + inline void from_json(const BasicJsonType& j, ENUM_TYPE& e) \ + { \ + static_assert(std::is_enum::value, #ENUM_TYPE " must be an enum!"); \ + static const std::pair m[] = __VA_ARGS__; \ + auto it = std::find_if(std::begin(m), std::end(m), \ + [&j](const std::pair& ej_pair) -> bool \ + { \ + return ej_pair.second == j; \ + }); \ + e = ((it != std::end(m)) ? it : std::begin(m))->first; \ + } + +// Ugly macros to avoid uglier copy-paste when specializing basic_json. They +// may be removed in the future once the class is split. + +#define NLOHMANN_BASIC_JSON_TPL_DECLARATION \ + template class ObjectType, \ + template class ArrayType, \ + class StringType, class BooleanType, class NumberIntegerType, \ + class NumberUnsignedType, class NumberFloatType, \ + template class AllocatorType, \ + template class JSONSerializer, \ + class BinaryType> + +#define NLOHMANN_BASIC_JSON_TPL \ + basic_json + +// Macros to simplify conversion from/to types + +#define NLOHMANN_JSON_EXPAND( x ) x +#define NLOHMANN_JSON_GET_MACRO(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26, _27, _28, _29, _30, _31, _32, _33, _34, _35, _36, _37, _38, _39, _40, _41, _42, _43, _44, _45, _46, _47, _48, _49, _50, _51, _52, _53, _54, _55, _56, _57, _58, _59, _60, _61, _62, _63, _64, NAME,...) NAME +#define NLOHMANN_JSON_PASTE(...) 
NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_GET_MACRO(__VA_ARGS__, \ + NLOHMANN_JSON_PASTE64, \ + NLOHMANN_JSON_PASTE63, \ + NLOHMANN_JSON_PASTE62, \ + NLOHMANN_JSON_PASTE61, \ + NLOHMANN_JSON_PASTE60, \ + NLOHMANN_JSON_PASTE59, \ + NLOHMANN_JSON_PASTE58, \ + NLOHMANN_JSON_PASTE57, \ + NLOHMANN_JSON_PASTE56, \ + NLOHMANN_JSON_PASTE55, \ + NLOHMANN_JSON_PASTE54, \ + NLOHMANN_JSON_PASTE53, \ + NLOHMANN_JSON_PASTE52, \ + NLOHMANN_JSON_PASTE51, \ + NLOHMANN_JSON_PASTE50, \ + NLOHMANN_JSON_PASTE49, \ + NLOHMANN_JSON_PASTE48, \ + NLOHMANN_JSON_PASTE47, \ + NLOHMANN_JSON_PASTE46, \ + NLOHMANN_JSON_PASTE45, \ + NLOHMANN_JSON_PASTE44, \ + NLOHMANN_JSON_PASTE43, \ + NLOHMANN_JSON_PASTE42, \ + NLOHMANN_JSON_PASTE41, \ + NLOHMANN_JSON_PASTE40, \ + NLOHMANN_JSON_PASTE39, \ + NLOHMANN_JSON_PASTE38, \ + NLOHMANN_JSON_PASTE37, \ + NLOHMANN_JSON_PASTE36, \ + NLOHMANN_JSON_PASTE35, \ + NLOHMANN_JSON_PASTE34, \ + NLOHMANN_JSON_PASTE33, \ + NLOHMANN_JSON_PASTE32, \ + NLOHMANN_JSON_PASTE31, \ + NLOHMANN_JSON_PASTE30, \ + NLOHMANN_JSON_PASTE29, \ + NLOHMANN_JSON_PASTE28, \ + NLOHMANN_JSON_PASTE27, \ + NLOHMANN_JSON_PASTE26, \ + NLOHMANN_JSON_PASTE25, \ + NLOHMANN_JSON_PASTE24, \ + NLOHMANN_JSON_PASTE23, \ + NLOHMANN_JSON_PASTE22, \ + NLOHMANN_JSON_PASTE21, \ + NLOHMANN_JSON_PASTE20, \ + NLOHMANN_JSON_PASTE19, \ + NLOHMANN_JSON_PASTE18, \ + NLOHMANN_JSON_PASTE17, \ + NLOHMANN_JSON_PASTE16, \ + NLOHMANN_JSON_PASTE15, \ + NLOHMANN_JSON_PASTE14, \ + NLOHMANN_JSON_PASTE13, \ + NLOHMANN_JSON_PASTE12, \ + NLOHMANN_JSON_PASTE11, \ + NLOHMANN_JSON_PASTE10, \ + NLOHMANN_JSON_PASTE9, \ + NLOHMANN_JSON_PASTE8, \ + NLOHMANN_JSON_PASTE7, \ + NLOHMANN_JSON_PASTE6, \ + NLOHMANN_JSON_PASTE5, \ + NLOHMANN_JSON_PASTE4, \ + NLOHMANN_JSON_PASTE3, \ + NLOHMANN_JSON_PASTE2, \ + NLOHMANN_JSON_PASTE1)(__VA_ARGS__)) +#define NLOHMANN_JSON_PASTE2(func, v1) func(v1) +#define NLOHMANN_JSON_PASTE3(func, v1, v2) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE2(func, v2) +#define NLOHMANN_JSON_PASTE4(func, v1, v2, v3) 
NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE3(func, v2, v3) +#define NLOHMANN_JSON_PASTE5(func, v1, v2, v3, v4) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE4(func, v2, v3, v4) +#define NLOHMANN_JSON_PASTE6(func, v1, v2, v3, v4, v5) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE5(func, v2, v3, v4, v5) +#define NLOHMANN_JSON_PASTE7(func, v1, v2, v3, v4, v5, v6) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE6(func, v2, v3, v4, v5, v6) +#define NLOHMANN_JSON_PASTE8(func, v1, v2, v3, v4, v5, v6, v7) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE7(func, v2, v3, v4, v5, v6, v7) +#define NLOHMANN_JSON_PASTE9(func, v1, v2, v3, v4, v5, v6, v7, v8) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE8(func, v2, v3, v4, v5, v6, v7, v8) +#define NLOHMANN_JSON_PASTE10(func, v1, v2, v3, v4, v5, v6, v7, v8, v9) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE9(func, v2, v3, v4, v5, v6, v7, v8, v9) +#define NLOHMANN_JSON_PASTE11(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE10(func, v2, v3, v4, v5, v6, v7, v8, v9, v10) +#define NLOHMANN_JSON_PASTE12(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE11(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11) +#define NLOHMANN_JSON_PASTE13(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE12(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12) +#define NLOHMANN_JSON_PASTE14(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE13(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13) +#define NLOHMANN_JSON_PASTE15(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE14(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14) +#define NLOHMANN_JSON_PASTE16(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15) 
NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE15(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15) +#define NLOHMANN_JSON_PASTE17(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE16(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16) +#define NLOHMANN_JSON_PASTE18(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE17(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17) +#define NLOHMANN_JSON_PASTE19(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE18(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18) +#define NLOHMANN_JSON_PASTE20(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE19(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19) +#define NLOHMANN_JSON_PASTE21(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE20(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20) +#define NLOHMANN_JSON_PASTE22(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE21(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21) +#define NLOHMANN_JSON_PASTE23(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE22(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22) +#define 
NLOHMANN_JSON_PASTE24(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE23(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23) +#define NLOHMANN_JSON_PASTE25(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE24(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24) +#define NLOHMANN_JSON_PASTE26(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE25(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25) +#define NLOHMANN_JSON_PASTE27(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE26(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26) +#define NLOHMANN_JSON_PASTE28(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE27(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27) +#define NLOHMANN_JSON_PASTE29(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE28(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28) +#define 
NLOHMANN_JSON_PASTE30(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE29(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29) +#define NLOHMANN_JSON_PASTE31(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE30(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30) +#define NLOHMANN_JSON_PASTE32(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE31(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31) +#define NLOHMANN_JSON_PASTE33(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE32(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32) +#define NLOHMANN_JSON_PASTE34(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE33(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33) +#define NLOHMANN_JSON_PASTE35(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, 
v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE34(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34) +#define NLOHMANN_JSON_PASTE36(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE35(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35) +#define NLOHMANN_JSON_PASTE37(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE36(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36) +#define NLOHMANN_JSON_PASTE38(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE37(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37) +#define NLOHMANN_JSON_PASTE39(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE38(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, 
v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38) +#define NLOHMANN_JSON_PASTE40(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE39(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39) +#define NLOHMANN_JSON_PASTE41(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE40(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40) +#define NLOHMANN_JSON_PASTE42(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE41(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41) +#define NLOHMANN_JSON_PASTE43(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE42(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, 
v42) +#define NLOHMANN_JSON_PASTE44(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE43(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43) +#define NLOHMANN_JSON_PASTE45(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE44(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44) +#define NLOHMANN_JSON_PASTE46(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE45(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45) +#define NLOHMANN_JSON_PASTE47(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE46(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, 
v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46) +#define NLOHMANN_JSON_PASTE48(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE47(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47) +#define NLOHMANN_JSON_PASTE49(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE48(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48) +#define NLOHMANN_JSON_PASTE50(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE49(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49) +#define NLOHMANN_JSON_PASTE51(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, 
v46, v47, v48, v49, v50) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE50(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50) +#define NLOHMANN_JSON_PASTE52(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE51(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51) +#define NLOHMANN_JSON_PASTE53(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE52(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52) +#define NLOHMANN_JSON_PASTE54(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE53(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, 
v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53) +#define NLOHMANN_JSON_PASTE55(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE54(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54) +#define NLOHMANN_JSON_PASTE56(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE55(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55) +#define NLOHMANN_JSON_PASTE57(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE56(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56) +#define 
NLOHMANN_JSON_PASTE58(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE57(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57) +#define NLOHMANN_JSON_PASTE59(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE58(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58) +#define NLOHMANN_JSON_PASTE60(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE59(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59) +#define NLOHMANN_JSON_PASTE61(func, v1, v2, 
v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE60(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60) +#define NLOHMANN_JSON_PASTE62(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE61(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61) +#define NLOHMANN_JSON_PASTE63(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE62(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, 
v61, v62) +#define NLOHMANN_JSON_PASTE64(func, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62, v63) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE63(func, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22, v23, v24, v25, v26, v27, v28, v29, v30, v31, v32, v33, v34, v35, v36, v37, v38, v39, v40, v41, v42, v43, v44, v45, v46, v47, v48, v49, v50, v51, v52, v53, v54, v55, v56, v57, v58, v59, v60, v61, v62, v63) + +#define NLOHMANN_JSON_TO(v1) nlohmann_json_j[#v1] = nlohmann_json_t.v1; +#define NLOHMANN_JSON_FROM(v1) nlohmann_json_j.at(#v1).get_to(nlohmann_json_t.v1); +#define NLOHMANN_JSON_FROM_WITH_DEFAULT(v1) nlohmann_json_t.v1 = nlohmann_json_j.value(#v1, nlohmann_json_default_obj.v1); + +/*! +@brief macro +@def NLOHMANN_DEFINE_TYPE_INTRUSIVE +@since version 3.9.0 +*/ +#define NLOHMANN_DEFINE_TYPE_INTRUSIVE(Type, ...) \ + friend void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \ + friend void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM, __VA_ARGS__)) } + +#define NLOHMANN_DEFINE_TYPE_INTRUSIVE_WITH_DEFAULT(Type, ...) \ + friend void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \ + friend void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { Type nlohmann_json_default_obj; NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM_WITH_DEFAULT, __VA_ARGS__)) } + +/*! 
+@brief macro +@def NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE +@since version 3.9.0 +*/ +#define NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE(Type, ...) \ + inline void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \ + inline void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM, __VA_ARGS__)) } + +#define NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE_WITH_DEFAULT(Type, ...) \ + inline void to_json(nlohmann::json& nlohmann_json_j, const Type& nlohmann_json_t) { NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_TO, __VA_ARGS__)) } \ + inline void from_json(const nlohmann::json& nlohmann_json_j, Type& nlohmann_json_t) { Type nlohmann_json_default_obj; NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_PASTE(NLOHMANN_JSON_FROM_WITH_DEFAULT, __VA_ARGS__)) } + + +// inspired from https://stackoverflow.com/a/26745591 +// allows to call any std function as if (e.g. 
with begin): +// using std::begin; begin(x); +// +// it allows using the detected idiom to retrieve the return type +// of such an expression +#define NLOHMANN_CAN_CALL_STD_FUNC_IMPL(std_name) \ + namespace detail { \ + using std::std_name; \ + \ + template \ + using result_of_##std_name = decltype(std_name(std::declval()...)); \ + } \ + \ + namespace detail2 { \ + struct std_name##_tag \ + { \ + }; \ + \ + template \ + std_name##_tag std_name(T&&...); \ + \ + template \ + using result_of_##std_name = decltype(std_name(std::declval()...)); \ + \ + template \ + struct would_call_std_##std_name \ + { \ + static constexpr auto const value = ::nlohmann::detail:: \ + is_detected_exact::value; \ + }; \ + } /* namespace detail2 */ \ + \ + template \ + struct would_call_std_##std_name : detail2::would_call_std_##std_name \ + { \ + } + +#ifndef JSON_USE_IMPLICIT_CONVERSIONS + #define JSON_USE_IMPLICIT_CONVERSIONS 1 +#endif + +#if JSON_USE_IMPLICIT_CONVERSIONS + #define JSON_EXPLICIT +#else + #define JSON_EXPLICIT explicit +#endif + +#ifndef JSON_DISABLE_ENUM_SERIALIZATION + #define JSON_DISABLE_ENUM_SERIALIZATION 0 +#endif + +#ifndef JSON_USE_GLOBAL_UDLS + #define JSON_USE_GLOBAL_UDLS 1 +#endif + +#if JSON_HAS_THREE_WAY_COMPARISON + #include // partial_ordering +#endif + +NLOHMANN_JSON_NAMESPACE_BEGIN +namespace detail +{ + +/////////////////////////// +// JSON type enumeration // +/////////////////////////// + +/*! +@brief the JSON type enumeration + +This enumeration collects the different JSON types. 
It is internally used to +distinguish the stored values, and the functions @ref basic_json::is_null(), +@ref basic_json::is_object(), @ref basic_json::is_array(), +@ref basic_json::is_string(), @ref basic_json::is_boolean(), +@ref basic_json::is_number() (with @ref basic_json::is_number_integer(), +@ref basic_json::is_number_unsigned(), and @ref basic_json::is_number_float()), +@ref basic_json::is_discarded(), @ref basic_json::is_primitive(), and +@ref basic_json::is_structured() rely on it. + +@note There are three enumeration entries (number_integer, number_unsigned, and +number_float), because the library distinguishes these three types for numbers: +@ref basic_json::number_unsigned_t is used for unsigned integers, +@ref basic_json::number_integer_t is used for signed integers, and +@ref basic_json::number_float_t is used for floating-point numbers or to +approximate integers which do not fit in the limits of their respective type. + +@sa see @ref basic_json::basic_json(const value_t value_type) -- create a JSON +value with the default value for a given type + +@since version 1.0.0 +*/ +enum class value_t : std::uint8_t +{ + null, ///< null value + object, ///< object (unordered set of name/value pairs) + array, ///< array (ordered collection of values) + string, ///< string value + boolean, ///< boolean value + number_integer, ///< number value (signed integer) + number_unsigned, ///< number value (unsigned integer) + number_float, ///< number value (floating-point) + binary, ///< binary array (ordered collection of bytes) + discarded ///< discarded by the parser callback function +}; + +/*! 
+@brief comparison operator for JSON types + +Returns an ordering that is similar to Python: +- order: null < boolean < number < object < array < string < binary +- furthermore, each type is not smaller than itself +- discarded values are not comparable +- binary is represented as a b"" string in python and directly comparable to a + string; however, making a binary array directly comparable with a string would + be surprising behavior in a JSON file. + +@since version 1.0.0 +*/ +#if JSON_HAS_THREE_WAY_COMPARISON + inline std::partial_ordering operator<=>(const value_t lhs, const value_t rhs) noexcept // *NOPAD* +#else + inline bool operator<(const value_t lhs, const value_t rhs) noexcept +#endif +{ + static constexpr std::array order = {{ + 0 /* null */, 3 /* object */, 4 /* array */, 5 /* string */, + 1 /* boolean */, 2 /* integer */, 2 /* unsigned */, 2 /* float */, + 6 /* binary */ + } + }; + + const auto l_index = static_cast(lhs); + const auto r_index = static_cast(rhs); +#if JSON_HAS_THREE_WAY_COMPARISON + if (l_index < order.size() && r_index < order.size()) + { + return order[l_index] <=> order[r_index]; // *NOPAD* + } + return std::partial_ordering::unordered; +#else + return l_index < order.size() && r_index < order.size() && order[l_index] < order[r_index]; +#endif +} + +// GCC selects the built-in operator< over an operator rewritten from +// a user-defined spaceship operator +// Clang, MSVC, and ICC select the rewritten candidate +// (see GCC bug https://gcc.gnu.org/bugzilla/show_bug.cgi?id=105200) +#if JSON_HAS_THREE_WAY_COMPARISON && defined(__GNUC__) +inline bool operator<(const value_t lhs, const value_t rhs) noexcept +{ + return std::is_lt(lhs <=> rhs); // *NOPAD* +} +#endif + +} // namespace detail +NLOHMANN_JSON_NAMESPACE_END + +// #include +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 
2013-2022 Niels Lohmann +// SPDX-License-Identifier: MIT + + + +// #include + + +NLOHMANN_JSON_NAMESPACE_BEGIN +namespace detail +{ + +/*! +@brief replace all occurrences of a substring by another string + +@param[in,out] s the string to manipulate; changed so that all + occurrences of @a f are replaced with @a t +@param[in] f the substring to replace with @a t +@param[in] t the string to replace @a f + +@pre The search string @a f must not be empty. **This precondition is +enforced with an assertion.** + +@since version 2.0.0 +*/ +template +inline void replace_substring(StringType& s, const StringType& f, + const StringType& t) +{ + JSON_ASSERT(!f.empty()); + for (auto pos = s.find(f); // find first occurrence of f + pos != StringType::npos; // make sure f was found + s.replace(pos, f.size(), t), // replace with t, and + pos = s.find(f, pos + t.size())) // find next occurrence of f + {} +} + +/*! + * @brief string escaping as described in RFC 6901 (Sect. 4) + * @param[in] s string to escape + * @return escaped string + * + * Note the order of escaping "~" to "~0" and "/" to "~1" is important. + */ +template +inline StringType escape(StringType s) +{ + replace_substring(s, StringType{"~"}, StringType{"~0"}); + replace_substring(s, StringType{"/"}, StringType{"~1"}); + return s; +} + +/*! + * @brief string unescaping as described in RFC 6901 (Sect. 4) + * @param[in] s string to unescape + * @return unescaped string + * + * Note the order of escaping "~1" to "/" and "~0" to "~" is important. 
+ */ +template +static void unescape(StringType& s) +{ + replace_substring(s, StringType{"~1"}, StringType{"/"}); + replace_substring(s, StringType{"~0"}, StringType{"~"}); +} + +} // namespace detail +NLOHMANN_JSON_NAMESPACE_END + +// #include +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-License-Identifier: MIT + + + +#include // size_t + +// #include + + +NLOHMANN_JSON_NAMESPACE_BEGIN +namespace detail +{ + +/// struct to capture the start position of the current token +struct position_t +{ + /// the total number of characters read + std::size_t chars_read_total = 0; + /// the number of characters read in the current line + std::size_t chars_read_current_line = 0; + /// the number of lines read + std::size_t lines_read = 0; + + /// conversion to size_t to preserve SAX interface + constexpr operator size_t() const + { + return chars_read_total; + } +}; + +} // namespace detail +NLOHMANN_JSON_NAMESPACE_END + +// #include + +// #include +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-FileCopyrightText: 2018 The Abseil Authors +// SPDX-License-Identifier: MIT + + + +#include // array +#include // size_t +#include // conditional, enable_if, false_type, integral_constant, is_constructible, is_integral, is_same, remove_cv, remove_reference, true_type +#include // index_sequence, make_index_sequence, index_sequence_for + +// #include + + +NLOHMANN_JSON_NAMESPACE_BEGIN +namespace detail +{ + +template +using uncvref_t = typename std::remove_cv::type>::type; + +#ifdef JSON_HAS_CPP_14 + +// the following utilities are natively available in C++14 +using std::enable_if_t; +using std::index_sequence; +using 
std::make_index_sequence; +using std::index_sequence_for; + +#else + +// alias templates to reduce boilerplate +template +using enable_if_t = typename std::enable_if::type; + +// The following code is taken from https://github.com/abseil/abseil-cpp/blob/10cb35e459f5ecca5b2ff107635da0bfa41011b4/absl/utility/utility.h +// which is part of Google Abseil (https://github.com/abseil/abseil-cpp), licensed under the Apache License 2.0. + +//// START OF CODE FROM GOOGLE ABSEIL + +// integer_sequence +// +// Class template representing a compile-time integer sequence. An instantiation +// of `integer_sequence` has a sequence of integers encoded in its +// type through its template arguments (which is a common need when +// working with C++11 variadic templates). `absl::integer_sequence` is designed +// to be a drop-in replacement for C++14's `std::integer_sequence`. +// +// Example: +// +// template< class T, T... Ints > +// void user_function(integer_sequence); +// +// int main() +// { +// // user_function's `T` will be deduced to `int` and `Ints...` +// // will be deduced to `0, 1, 2, 3, 4`. +// user_function(make_integer_sequence()); +// } +template +struct integer_sequence +{ + using value_type = T; + static constexpr std::size_t size() noexcept + { + return sizeof...(Ints); + } +}; + +// index_sequence +// +// A helper template for an `integer_sequence` of `size_t`, +// `absl::index_sequence` is designed to be a drop-in replacement for C++14's +// `std::index_sequence`. +template +using index_sequence = integer_sequence; + +namespace utility_internal +{ + +template +struct Extend; + +// Note that SeqSize == sizeof...(Ints). It's passed explicitly for efficiency. +template +struct Extend, SeqSize, 0> +{ + using type = integer_sequence < T, Ints..., (Ints + SeqSize)... >; +}; + +template +struct Extend, SeqSize, 1> +{ + using type = integer_sequence < T, Ints..., (Ints + SeqSize)..., 2 * SeqSize >; +}; + +// Recursion helper for 'make_integer_sequence'. 
+// 'Gen::type' is an alias for 'integer_sequence'. +template +struct Gen +{ + using type = + typename Extend < typename Gen < T, N / 2 >::type, N / 2, N % 2 >::type; +}; + +template +struct Gen +{ + using type = integer_sequence; +}; + +} // namespace utility_internal + +// Compile-time sequences of integers + +// make_integer_sequence +// +// This template alias is equivalent to +// `integer_sequence`, and is designed to be a drop-in +// replacement for C++14's `std::make_integer_sequence`. +template +using make_integer_sequence = typename utility_internal::Gen::type; + +// make_index_sequence +// +// This template alias is equivalent to `index_sequence<0, 1, ..., N-1>`, +// and is designed to be a drop-in replacement for C++14's +// `std::make_index_sequence`. +template +using make_index_sequence = make_integer_sequence; + +// index_sequence_for +// +// Converts a typename pack into an index sequence of the same length, and +// is designed to be a drop-in replacement for C++14's +// `std::index_sequence_for()` +template +using index_sequence_for = make_index_sequence; + +//// END OF CODE FROM GOOGLE ABSEIL + +#endif + +// dispatch utility (taken from ranges-v3) +template struct priority_tag : priority_tag < N - 1 > {}; +template<> struct priority_tag<0> {}; + +// taken from ranges-v3 +template +struct static_const +{ + static JSON_INLINE_VARIABLE constexpr T value{}; +}; + +#ifndef JSON_HAS_CPP_17 + template + constexpr T static_const::value; +#endif + +template +inline constexpr std::array make_array(Args&& ... 
args) +{ + return std::array {{static_cast(std::forward(args))...}}; +} + +} // namespace detail +NLOHMANN_JSON_NAMESPACE_END + +// #include +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-License-Identifier: MIT + + + +#include // numeric_limits +#include // false_type, is_constructible, is_integral, is_same, true_type +#include // declval +#include // tuple + +// #include +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-License-Identifier: MIT + + + +#include // random_access_iterator_tag + +// #include + +// #include + +// #include + + +NLOHMANN_JSON_NAMESPACE_BEGIN +namespace detail +{ + +template +struct iterator_types {}; + +template +struct iterator_types < + It, + void_t> +{ + using difference_type = typename It::difference_type; + using value_type = typename It::value_type; + using pointer = typename It::pointer; + using reference = typename It::reference; + using iterator_category = typename It::iterator_category; +}; + +// This is required as some compilers implement std::iterator_traits in a way that +// doesn't work with SFINAE. See https://github.com/nlohmann/json/issues/1341. 
+template +struct iterator_traits +{ +}; + +template +struct iterator_traits < T, enable_if_t < !std::is_pointer::value >> + : iterator_types +{ +}; + +template +struct iterator_traits::value>> +{ + using iterator_category = std::random_access_iterator_tag; + using value_type = T; + using difference_type = ptrdiff_t; + using pointer = T*; + using reference = T&; +}; + +} // namespace detail +NLOHMANN_JSON_NAMESPACE_END + +// #include + +// #include +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-License-Identifier: MIT + + + +// #include + + +NLOHMANN_JSON_NAMESPACE_BEGIN + +NLOHMANN_CAN_CALL_STD_FUNC_IMPL(begin); + +NLOHMANN_JSON_NAMESPACE_END + +// #include +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-License-Identifier: MIT + + + +// #include + + +NLOHMANN_JSON_NAMESPACE_BEGIN + +NLOHMANN_CAN_CALL_STD_FUNC_IMPL(end); + +NLOHMANN_JSON_NAMESPACE_END + +// #include + +// #include + +// #include +// __ _____ _____ _____ +// __| | __| | | | JSON for Modern C++ +// | | |__ | | | | | | version 3.11.2 +// |_____|_____|_____|_|___| https://github.com/nlohmann/json +// +// SPDX-FileCopyrightText: 2013-2022 Niels Lohmann +// SPDX-License-Identifier: MIT + +#ifndef INCLUDE_NLOHMANN_JSON_FWD_HPP_ + #define INCLUDE_NLOHMANN_JSON_FWD_HPP_ + + #include // int64_t, uint64_t + #include // map + #include // allocator + #include // string + #include // vector + + // #include + + + /*! + @brief namespace for Niels Lohmann + @see https://github.com/nlohmann + @since version 1.0.0 + */ + NLOHMANN_JSON_NAMESPACE_BEGIN + + /*! 
+ @brief default JSONSerializer template argument + + This serializer ignores the template arguments and uses ADL + ([argument-dependent lookup](https://en.cppreference.com/w/cpp/language/adl)) + for serialization. + */ + template + struct adl_serializer; + + /// a class to store JSON values + /// @sa https://json.nlohmann.me/api/basic_json/ + template class ObjectType = + std::map, + template class ArrayType = std::vector, + class StringType = std::string, class BooleanType = bool, + class NumberIntegerType = std::int64_t, + class NumberUnsignedType = std::uint64_t, + class NumberFloatType = double, + template class AllocatorType = std::allocator, + template class JSONSerializer = + adl_serializer, + class BinaryType = std::vector> + class basic_json; + + /// @brief JSON Pointer defines a string syntax for identifying a specific value within a JSON document + /// @sa https://json.nlohmann.me/api/json_pointer/ + template + class json_pointer; + + /*! + @brief default specialization + @sa https://json.nlohmann.me/api/json/ + */ + using json = basic_json<>; + + /// @brief a minimal map-like container that preserves insertion order + /// @sa https://json.nlohmann.me/api/ordered_map/ + template + struct ordered_map; + + /// @brief specialization that maintains the insertion order of object keys + /// @sa https://json.nlohmann.me/api/ordered_json/ + using ordered_json = basic_json; + + NLOHMANN_JSON_NAMESPACE_END + +#endif // INCLUDE_NLOHMANN_JSON_FWD_HPP_ + + +NLOHMANN_JSON_NAMESPACE_BEGIN +/*! +@brief detail namespace with internal helper functions + +This namespace collects functions that should not be exposed, +implementations of some @ref basic_json methods, and meta-programming helpers. + +@since version 2.1.0 +*/ +namespace detail +{ + +///////////// +// helpers // +///////////// + +// Note to maintainers: +// +// Every trait in this file expects a non CV-qualified type. +// The only exceptions are in the 'aliases for detected' section +// (i.e. 
those of the form: decltype(T::member_function(std::declval()))) +// +// In this case, T has to be properly CV-qualified to constraint the function arguments +// (e.g. to_json(BasicJsonType&, const T&)) + +template struct is_basic_json : std::false_type {}; + +NLOHMANN_BASIC_JSON_TPL_DECLARATION +struct is_basic_json : std::true_type {}; + +// used by exceptions create() member functions +// true_type for pointer to possibly cv-qualified basic_json or std::nullptr_t +// false_type otherwise +template +struct is_basic_json_context : + std::integral_constant < bool, + is_basic_json::type>::type>::value + || std::is_same::value > +{}; + +////////////////////// +// json_ref helpers // +////////////////////// + +template +class json_ref; + +template +struct is_json_ref : std::false_type {}; + +template +struct is_json_ref> : std::true_type {}; + +////////////////////////// +// aliases for detected // +////////////////////////// + +template +using mapped_type_t = typename T::mapped_type; + +template +using key_type_t = typename T::key_type; + +template +using value_type_t = typename T::value_type; + +template +using difference_type_t = typename T::difference_type; + +template +using pointer_t = typename T::pointer; + +template +using reference_t = typename T::reference; + +template +using iterator_category_t = typename T::iterator_category; + +template +using to_json_function = decltype(T::to_json(std::declval()...)); + +template +using from_json_function = decltype(T::from_json(std::declval()...)); + +template +using get_template_function = decltype(std::declval().template get()); + +// trait checking if JSONSerializer::from_json(json const&, udt&) exists +template +struct has_from_json : std::false_type {}; + +// trait checking if j.get is valid +// use this trait instead of std::is_constructible or std::is_convertible, +// both rely on, or make use of implicit conversions, and thus fail when T +// has several constructors/operator= (see 
https://github.com/nlohmann/json/issues/958) +template +struct is_getable +{ + static constexpr bool value = is_detected::value; +}; + +template +struct has_from_json < BasicJsonType, T, enable_if_t < !is_basic_json::value >> +{ + using serializer = typename BasicJsonType::template json_serializer; + + static constexpr bool value = + is_detected_exact::value; +}; + +// This trait checks if JSONSerializer::from_json(json const&) exists +// this overload is used for non-default-constructible user-defined-types +template +struct has_non_default_from_json : std::false_type {}; + +template +struct has_non_default_from_json < BasicJsonType, T, enable_if_t < !is_basic_json::value >> +{ + using serializer = typename BasicJsonType::template json_serializer; + + static constexpr bool value = + is_detected_exact::value; +}; + +// This trait checks if BasicJsonType::json_serializer::to_json exists +// Do not evaluate the trait when T is a basic_json type, to avoid template instantiation infinite recursion. 
+template +struct has_to_json : std::false_type {}; + +template +struct has_to_json < BasicJsonType, T, enable_if_t < !is_basic_json::value >> +{ + using serializer = typename BasicJsonType::template json_serializer; + + static constexpr bool value = + is_detected_exact::value; +}; + +template +using detect_key_compare = typename T::key_compare; + +template +struct has_key_compare : std::integral_constant::value> {}; + +// obtains the actual object key comparator +template +struct actual_object_comparator +{ + using object_t = typename BasicJsonType::object_t; + using object_comparator_t = typename BasicJsonType::default_object_comparator_t; + using type = typename std::conditional < has_key_compare::value, + typename object_t::key_compare, object_comparator_t>::type; +}; + +template +using actual_object_comparator_t = typename actual_object_comparator::type; + +/////////////////// +// is_ functions // +/////////////////// + +// https://en.cppreference.com/w/cpp/types/conjunction +template struct conjunction : std::true_type { }; +template struct conjunction : B { }; +template +struct conjunction +: std::conditional(B::value), conjunction, B>::type {}; + +// https://en.cppreference.com/w/cpp/types/negation +template struct negation : std::integral_constant < bool, !B::value > { }; + +// Reimplementation of is_constructible and is_default_constructible, due to them being broken for +// std::pair and std::tuple until LWG 2367 fix (see https://cplusplus.github.io/LWG/lwg-defects.html#2367). +// This causes compile errors in e.g. clang 3.5 or gcc 4.9. 
+template +struct is_default_constructible : std::is_default_constructible {}; + +template +struct is_default_constructible> + : conjunction, is_default_constructible> {}; + +template +struct is_default_constructible> + : conjunction, is_default_constructible> {}; + +template +struct is_default_constructible> + : conjunction...> {}; + +template +struct is_default_constructible> + : conjunction...> {}; + + +template +struct is_constructible : std::is_constructible {}; + +template +struct is_constructible> : is_default_constructible> {}; + +template +struct is_constructible> : is_default_constructible> {}; + +template +struct is_constructible> : is_default_constructible> {}; + +template +struct is_constructible> : is_default_constructible> {}; + + +template +struct is_iterator_traits : std::false_type {}; + +template +struct is_iterator_traits> +{ + private: + using traits = iterator_traits; + + public: + static constexpr auto value = + is_detected::value && + is_detected::value && + is_detected::value && + is_detected::value && + is_detected::value; +}; + +template +struct is_range +{ + private: + using t_ref = typename std::add_lvalue_reference::type; + + using iterator = detected_t; + using sentinel = detected_t; + + // to be 100% correct, it should use https://en.cppreference.com/w/cpp/iterator/input_or_output_iterator + // and https://en.cppreference.com/w/cpp/iterator/sentinel_for + // but reimplementing these would be too much work, as a lot of other concepts are used underneath + static constexpr auto is_iterator_begin = + is_iterator_traits>::value; + + public: + static constexpr bool value = !std::is_same::value && !std::is_same::value && is_iterator_begin; +}; + +template +using iterator_t = enable_if_t::value, result_of_begin())>>; + +template +using range_value_t = value_type_t>>; + +// The following implementation of is_complete_type is taken from +// 
https://blogs.msdn.microsoft.com/vcblog/2015/12/02/partial-support-for-expression-sfinae-in-vs-2015-update-1/ +// and is written by Xiang Fan who agreed to using it in this library. + +template +struct is_complete_type : std::false_type {}; + +template +struct is_complete_type : std::true_type {}; + +template +struct is_compatible_object_type_impl : std::false_type {}; + +template +struct is_compatible_object_type_impl < + BasicJsonType, CompatibleObjectType, + enable_if_t < is_detected::value&& + is_detected::value >> +{ + using object_t = typename BasicJsonType::object_t; + + // macOS's is_constructible does not play well with nonesuch... + static constexpr bool value = + is_constructible::value && + is_constructible::value; +}; + +template +struct is_compatible_object_type + : is_compatible_object_type_impl {}; + +template +struct is_constructible_object_type_impl : std::false_type {}; + +template +struct is_constructible_object_type_impl < + BasicJsonType, ConstructibleObjectType, + enable_if_t < is_detected::value&& + is_detected::value >> +{ + using object_t = typename BasicJsonType::object_t; + + static constexpr bool value = + (is_default_constructible::value && + (std::is_move_assignable::value || + std::is_copy_assignable::value) && + (is_constructible::value && + std::is_same < + typename object_t::mapped_type, + typename ConstructibleObjectType::mapped_type >::value)) || + (has_from_json::value || + has_non_default_from_json < + BasicJsonType, + typename ConstructibleObjectType::mapped_type >::value); +}; + +template +struct is_constructible_object_type + : is_constructible_object_type_impl {}; + +template +struct is_compatible_string_type +{ + static constexpr auto value = + is_constructible::value; +}; + +template +struct is_constructible_string_type +{ + // launder type through decltype() to fix compilation failure on ICPC +#ifdef __INTEL_COMPILER + using laundered_type = decltype(std::declval()); +#else + using laundered_type = 
ConstructibleStringType; +#endif + + static constexpr auto value = + conjunction < + is_constructible, + is_detected_exact>::value; +}; + +template +struct is_compatible_array_type_impl : std::false_type {}; + +template +struct is_compatible_array_type_impl < + BasicJsonType, CompatibleArrayType, + enable_if_t < + is_detected::value&& + is_iterator_traits>>::value&& +// special case for types like std::filesystem::path whose iterator's value_type are themselves +// c.f. https://github.com/nlohmann/json/pull/3073 + !std::is_same>::value >> +{ + static constexpr bool value = + is_constructible>::value; +}; + +template +struct is_compatible_array_type + : is_compatible_array_type_impl {}; + +template +struct is_constructible_array_type_impl : std::false_type {}; + +template +struct is_constructible_array_type_impl < + BasicJsonType, ConstructibleArrayType, + enable_if_t::value >> + : std::true_type {}; + +template +struct is_constructible_array_type_impl < + BasicJsonType, ConstructibleArrayType, + enable_if_t < !std::is_same::value&& + !is_compatible_string_type::value&& + is_default_constructible::value&& +(std::is_move_assignable::value || + std::is_copy_assignable::value)&& +is_detected::value&& +is_iterator_traits>>::value&& +is_detected::value&& +// special case for types like std::filesystem::path whose iterator's value_type are themselves +// c.f. 
https://github.com/nlohmann/json/pull/3073 +!std::is_same>::value&& + is_complete_type < + detected_t>::value >> +{ + using value_type = range_value_t; + + static constexpr bool value = + std::is_same::value || + has_from_json::value || + has_non_default_from_json < + BasicJsonType, + value_type >::value; +}; + +template +struct is_constructible_array_type + : is_constructible_array_type_impl {}; + +template +struct is_compatible_integer_type_impl : std::false_type {}; + +template +struct is_compatible_integer_type_impl < + RealIntegerType, CompatibleNumberIntegerType, + enable_if_t < std::is_integral::value&& + std::is_integral::value&& + !std::is_same::value >> +{ + // is there an assert somewhere on overflows? + using RealLimits = std::numeric_limits; + using CompatibleLimits = std::numeric_limits; + + static constexpr auto value = + is_constructible::value && + CompatibleLimits::is_integer && + RealLimits::is_signed == CompatibleLimits::is_signed; +}; + +template +struct is_compatible_integer_type + : is_compatible_integer_type_impl {}; + +template +struct is_compatible_type_impl: std::false_type {}; + +template +struct is_compatible_type_impl < + BasicJsonType, CompatibleType, + enable_if_t::value >> +{ + static constexpr bool value = + has_to_json::value; +}; + +template +struct is_compatible_type + : is_compatible_type_impl {}; + +template +struct is_constructible_tuple : std::false_type {}; + +template +struct is_constructible_tuple> : conjunction...> {}; + +template +struct is_json_iterator_of : std::false_type {}; + +template +struct is_json_iterator_of : std::true_type {}; + +template +struct is_json_iterator_of : std::true_type +{}; + +// checks if a given type T is a template specialization of Primary +template