diff --git a/ollama/.dockerignore b/ollama/.dockerignore new file mode 100755 index 0000000000000000000000000000000000000000..43f2e07dd4380dd177270e5c75426f7f664862a9 --- /dev/null +++ b/ollama/.dockerignore @@ -0,0 +1,9 @@ +.vscode +ollama +app +macapp +dist +llm/llama.cpp +.env +.cache +test_data diff --git a/ollama/.gitattributes b/ollama/.gitattributes new file mode 100755 index 0000000000000000000000000000000000000000..f1c8bcb4d217a19fa708460aee7021358fdc6bcb --- /dev/null +++ b/ollama/.gitattributes @@ -0,0 +1,3 @@ +llm/ext_server/* linguist-vendored +* text=auto +*.go text eol=lf diff --git a/ollama/.github/ISSUE_TEMPLATE/10_bug_report.yml b/ollama/.github/ISSUE_TEMPLATE/10_bug_report.yml new file mode 100755 index 0000000000000000000000000000000000000000..d0c79bca8696a92c34a28a61d2f4d2adb99f9be0 --- /dev/null +++ b/ollama/.github/ISSUE_TEMPLATE/10_bug_report.yml @@ -0,0 +1,60 @@ +name: Bug report +labels: [bug] +description: Something isn't working right. +body: + - type: textarea + id: description + attributes: + label: What is the issue? + description: What happened? What did you expect to happen? + validations: + required: true + - type: dropdown + id: os + attributes: + label: OS + description: Which operating system are you using? + multiple: true + options: + - Linux + - macOS + - Windows + - Docker + - WSL2 + validations: + required: false + - type: dropdown + id: gpu + attributes: + label: GPU + description: Which GPU are you using? + multiple: true + options: + - Nvidia + - AMD + - Intel + - Apple + - Other + validations: + required: false + - type: dropdown + id: cpu + attributes: + label: CPU + description: Which CPU are you using? + multiple: true + options: + - Intel + - AMD + - Apple + - Other + validations: + required: false + - type: input + id: version + attributes: + label: Ollama version + description: What version of Ollama are you using? 
(`ollama --version`) + placeholder: e.g., 0.1.32 + validations: + required: false diff --git a/ollama/.github/ISSUE_TEMPLATE/20_feature_request.md b/ollama/.github/ISSUE_TEMPLATE/20_feature_request.md new file mode 100755 index 0000000000000000000000000000000000000000..e8997219c037331c41a97d7fb4970364e58a8b35 --- /dev/null +++ b/ollama/.github/ISSUE_TEMPLATE/20_feature_request.md @@ -0,0 +1,6 @@ +--- +name: Feature request +about: Request a new feature +labels: feature request +--- + diff --git a/ollama/.github/ISSUE_TEMPLATE/30_model_request.md b/ollama/.github/ISSUE_TEMPLATE/30_model_request.md new file mode 100755 index 0000000000000000000000000000000000000000..c705a5f37a7e89fc59b5e42c82bc7cd9b6c7c54c --- /dev/null +++ b/ollama/.github/ISSUE_TEMPLATE/30_model_request.md @@ -0,0 +1,5 @@ +--- +name: Model request +about: Request support for a new model to be added to Ollama +labels: model request +--- \ No newline at end of file diff --git a/ollama/.github/ISSUE_TEMPLATE/config.yml b/ollama/.github/ISSUE_TEMPLATE/config.yml new file mode 100755 index 0000000000000000000000000000000000000000..70d9aa3a0662753f2d4e2e87fec19ba5e8525fb4 --- /dev/null +++ b/ollama/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,8 @@ +blank_issues_enabled: true +contact_links: + - name: Help + url: https://discord.com/invite/ollama + about: Please join our Discord server for help using Ollama + - name: Troubleshooting + url: https://github.com/ollama/ollama/blob/main/docs/faq.md#faq + about: See the FAQ for common issues and solutions diff --git a/ollama/.github/workflows/latest.yaml b/ollama/.github/workflows/latest.yaml new file mode 100755 index 0000000000000000000000000000000000000000..4d47dd368268b967873d0d60ee2bcc72d8704a3e --- /dev/null +++ b/ollama/.github/workflows/latest.yaml @@ -0,0 +1,24 @@ +name: latest + +on: + release: + types: [released] + +jobs: + update-latest: + environment: release + runs-on: linux + steps: + - uses: actions/checkout@v4 + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ vars.DOCKER_USER }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + - name: Tag images as latest + env: + PUSH: "1" + shell: bash + run: | + export "VERSION=${GITHUB_REF_NAME#v}" + ./scripts/tag_latest.sh diff --git a/ollama/.github/workflows/release.yaml b/ollama/.github/workflows/release.yaml new file mode 100755 index 0000000000000000000000000000000000000000..f0c6db5dd8f13adb0d8f50a9bba1970dbe2e0a9f --- /dev/null +++ b/ollama/.github/workflows/release.yaml @@ -0,0 +1,480 @@ +name: release + +on: + push: + tags: + - 'v*' + +jobs: + # Full build of the Mac assets + build-darwin: + runs-on: macos-12 + environment: release + steps: + - uses: actions/checkout@v4 + - name: Set Version + shell: bash + run: | + echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV + echo "RELEASE_VERSION=$(echo ${GITHUB_REF_NAME} | cut -f1 -d-)" >> $GITHUB_ENV + - name: key + env: + MACOS_SIGNING_KEY: ${{ secrets.MACOS_SIGNING_KEY }} + MACOS_SIGNING_KEY_PASSWORD: ${{ secrets.MACOS_SIGNING_KEY_PASSWORD }} + run: | + echo $MACOS_SIGNING_KEY | base64 --decode > certificate.p12 + security create-keychain -p password build.keychain + security default-keychain -s build.keychain + security unlock-keychain -p password build.keychain + security import certificate.p12 -k build.keychain -P $MACOS_SIGNING_KEY_PASSWORD -T /usr/bin/codesign + security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k password build.keychain + security set-keychain-settings -lut 3600 build.keychain + - uses: 
actions/setup-go@v5 + with: + go-version: "stable" + cache: true + - name: Build Darwin + env: + APPLE_IDENTITY: ${{ secrets.APPLE_IDENTITY }} + APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }} + APPLE_TEAM_ID: ${{ vars.APPLE_TEAM_ID }} + APPLE_ID: ${{ vars.APPLE_ID }} + SDKROOT: /Applications/Xcode_13.4.1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk + DEVELOPER_DIR: /Applications/Xcode_13.4.1.app/Contents/Developer + run: | + ./scripts/build_darwin.sh + + - uses: actions/upload-artifact@v4 + with: + name: dist-darwin + path: | + dist/*arwin* + !dist/*-cov + + # Windows builds take a long time to both install the dependencies and build, so parallelize + # CPU generation step + generate-windows-cpu: + environment: release + runs-on: windows + env: + KEY_CONTAINER: ${{ vars.KEY_CONTAINER }} + steps: + - uses: actions/checkout@v4 + - name: Set Version + shell: bash + run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV + - uses: 'google-github-actions/auth@v2' + with: + project_id: 'ollama' + credentials_json: '${{ secrets.GOOGLE_SIGNING_CREDENTIALS }}' + - run: echo "${{ vars.OLLAMA_CERT }}" > ollama_inc.crt + - name: install Windows SDK 8.1 to get signtool + run: | + $ErrorActionPreference = "Stop" + write-host "downloading SDK" + Invoke-WebRequest -Uri "https://go.microsoft.com/fwlink/p/?LinkId=323507" -OutFile "${env:RUNNER_TEMP}\sdksetup.exe" + Start-Process "${env:RUNNER_TEMP}\sdksetup.exe" -ArgumentList @("/q") -NoNewWindow -Wait + write-host "Win SDK 8.1 installed" + gci -path 'C:\Program Files (x86)\Windows Kits\' -r -fi 'signtool.exe' + - name: install signing plugin + run: | + $ErrorActionPreference = "Stop" + write-host "downloading plugin" + Invoke-WebRequest -Uri "https://github.com/GoogleCloudPlatform/kms-integrations/releases/download/cng-v1.0/kmscng-1.0-windows-amd64.zip" -OutFile "${env:RUNNER_TEMP}\plugin.zip" + Expand-Archive -Path "${env:RUNNER_TEMP}\plugin.zip" -DestinationPath ${env:RUNNER_TEMP}\plugin\ + write-host "Installing plugin" + & "${env:RUNNER_TEMP}\plugin\*\kmscng.msi" /quiet + write-host "plugin installed" + - uses: actions/setup-go@v5 + with: + go-version: "stable" + cache: true + - run: go get ./... + - run: | + $gopath=(get-command go).source | split-path -parent + & "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1" + cd $env:GITHUB_WORKSPACE + $env:CMAKE_SYSTEM_VERSION="10.0.22621.0" + $env:PATH="$gopath;$env:PATH" + go generate -x ./... 
+ name: go generate + - uses: actions/upload-artifact@v4 + with: + name: generate-windows-cpu + path: | + llm/build/**/bin/* + llm/build/**/*.a + dist/windows-amd64/** + + # ROCm generation step + generate-windows-rocm: + environment: release + runs-on: windows + env: + KEY_CONTAINER: ${{ vars.KEY_CONTAINER }} + steps: + - uses: actions/checkout@v4 + - name: Set Version + shell: bash + run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV + - uses: 'google-github-actions/auth@v2' + with: + project_id: 'ollama' + credentials_json: '${{ secrets.GOOGLE_SIGNING_CREDENTIALS }}' + - run: echo "${{ vars.OLLAMA_CERT }}" > ollama_inc.crt + - name: install Windows SDK 8.1 to get signtool + run: | + $ErrorActionPreference = "Stop" + write-host "downloading SDK" + Invoke-WebRequest -Uri "https://go.microsoft.com/fwlink/p/?LinkId=323507" -OutFile "${env:RUNNER_TEMP}\sdksetup.exe" + Start-Process "${env:RUNNER_TEMP}\sdksetup.exe" -ArgumentList @("/q") -NoNewWindow -Wait + write-host "Win SDK 8.1 installed" + gci -path 'C:\Program Files (x86)\Windows Kits\' -r -fi 'signtool.exe' + - name: install signing plugin + run: | + $ErrorActionPreference = "Stop" + write-host "downloading plugin" + Invoke-WebRequest -Uri "https://github.com/GoogleCloudPlatform/kms-integrations/releases/download/cng-v1.0/kmscng-1.0-windows-amd64.zip" -OutFile "${env:RUNNER_TEMP}\plugin.zip" + Expand-Archive -Path "${env:RUNNER_TEMP}\plugin.zip" -DestinationPath ${env:RUNNER_TEMP}\plugin\ + write-host "Installing plugin" + & "${env:RUNNER_TEMP}\plugin\*\kmscng.msi" /quiet + write-host "plugin installed" + - uses: actions/setup-go@v5 + with: + go-version: "stable" + cache: true + - name: 'Install ROCm' + run: | + $ErrorActionPreference = "Stop" + write-host "downloading AMD HIP Installer" + Invoke-WebRequest -Uri "https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-24.Q3-WinSvr2022-For-HIP.exe" -OutFile "${env:RUNNER_TEMP}\rocm-install.exe" + write-host "Installing AMD HIP" + Start-Process "${env:RUNNER_TEMP}\rocm-install.exe" -ArgumentList '-install' -NoNewWindow -Wait + write-host "Completed AMD HIP" + - name: 'Verify ROCm' + run: | + & 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' --version + - run: go get ./... + - run: | + $gopath=(get-command go).source | split-path -parent + & "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1" + cd $env:GITHUB_WORKSPACE + $env:CMAKE_SYSTEM_VERSION="10.0.22621.0" + $env:PATH="$gopath;$env:PATH" + $env:OLLAMA_SKIP_CPU_GENERATE="1" + $env:HIP_PATH=$(Resolve-Path 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' | split-path | split-path) + go generate -x ./... 
+ name: go generate + - name: 'gather rocm dependencies' + run: | + $HIP_PATH=$(Resolve-Path 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' | split-path | split-path) + md "dist\deps\bin\rocblas\library" + cp "${HIP_PATH}\bin\hipblas.dll" "dist\deps\bin\" + cp "${HIP_PATH}\bin\rocblas.dll" "dist\deps\bin\" + cp "${HIP_PATH}\bin\rocblas\library\*" "dist\deps\bin\rocblas\library\" + - uses: actions/upload-artifact@v4 + with: + name: generate-windows-rocm + path: | + llm/build/**/bin/* + dist/windows-amd64/** + - uses: actions/upload-artifact@v4 + with: + name: windows-rocm-deps + path: dist/deps/* + + # CUDA generation step + generate-windows-cuda: + environment: release + runs-on: windows + env: + KEY_CONTAINER: ${{ vars.KEY_CONTAINER }} + steps: + - uses: actions/checkout@v4 + - name: Set Version + shell: bash + run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV + - uses: 'google-github-actions/auth@v2' + with: + project_id: 'ollama' + credentials_json: '${{ secrets.GOOGLE_SIGNING_CREDENTIALS }}' + - run: echo "${{ vars.OLLAMA_CERT }}" > ollama_inc.crt + - name: install Windows SDK 8.1 to get signtool + run: | + $ErrorActionPreference = "Stop" + write-host "downloading SDK" + Invoke-WebRequest -Uri "https://go.microsoft.com/fwlink/p/?LinkId=323507" -OutFile "${env:RUNNER_TEMP}\sdksetup.exe" + Start-Process "${env:RUNNER_TEMP}\sdksetup.exe" -ArgumentList @("/q") -NoNewWindow -Wait + write-host "Win SDK 8.1 installed" + gci -path 'C:\Program Files (x86)\Windows Kits\' -r -fi 'signtool.exe' + - name: install signing plugin + run: | + $ErrorActionPreference = "Stop" + write-host "downloading plugin" + Invoke-WebRequest -Uri "https://github.com/GoogleCloudPlatform/kms-integrations/releases/download/cng-v1.0/kmscng-1.0-windows-amd64.zip" -OutFile "${env:RUNNER_TEMP}\plugin.zip" + Expand-Archive -Path "${env:RUNNER_TEMP}\plugin.zip" -DestinationPath ${env:RUNNER_TEMP}\plugin\ + write-host "Installing plugin" + & "${env:RUNNER_TEMP}\plugin\*\kmscng.msi" /quiet + write-host "plugin installed" + - uses: actions/setup-go@v5 + with: + go-version: "stable" + cache: true + - name: 'Install CUDA' + run: | + $ErrorActionPreference = "Stop" + write-host "downloading CUDA Installer" + Invoke-WebRequest -Uri "https://developer.download.nvidia.com/compute/cuda/11.3.1/local_installers/cuda_11.3.1_465.89_win10.exe" -OutFile "${env:RUNNER_TEMP}\cuda-install.exe" + write-host "Installing CUDA" + Start-Process "${env:RUNNER_TEMP}\cuda-install.exe" -ArgumentList '-s' -NoNewWindow -Wait + write-host "Completed CUDA" + $cudaPath=((resolve-path "c:\Program Files\NVIDIA*\CUDA\v*\bin\nvcc.exe")[0].path | split-path | split-path) + $cudaVer=($cudaPath | split-path -leaf ) -replace 'v(\d+).(\d+)', '$1_$2' + echo "$cudaPath\bin" >> $env:GITHUB_PATH + echo "CUDA_PATH=$cudaPath" >> $env:GITHUB_ENV + echo "CUDA_PATH_V${cudaVer}=$cudaPath" >> $env:GITHUB_ENV + echo "CUDA_PATH_VX_Y=CUDA_PATH_V${cudaVer}" >> $env:GITHUB_ENV + - name: 'Verify CUDA' + run: nvcc -V + - run: go get ./... + - name: go generate + run: | + $gopath=(get-command go).source | split-path -parent + $cudabin=(get-command nvcc).source | split-path + & "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1" + cd $env:GITHUB_WORKSPACE + $env:CMAKE_SYSTEM_VERSION="10.0.22621.0" + $env:PATH="$gopath;$cudabin;$env:PATH" + $env:OLLAMA_SKIP_CPU_GENERATE="1" + go generate -x ./... 
+ - name: 'gather cuda dependencies' + run: | + $NVIDIA_DIR=(resolve-path 'C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\*\bin\')[0] + md "dist\deps" + cp "${NVIDIA_DIR}\cudart64_*.dll" "dist\deps\" + cp "${NVIDIA_DIR}\cublas64_*.dll" "dist\deps\" + cp "${NVIDIA_DIR}\cublasLt64_*.dll" "dist\deps\" + - uses: actions/upload-artifact@v4 + with: + name: generate-windows-cuda + path: | + llm/build/**/bin/* + dist/windows-amd64/** + - uses: actions/upload-artifact@v4 + with: + name: windows-cuda-deps + path: dist/deps/* + + # Import the prior generation steps and build the final windows assets + build-windows: + environment: release + runs-on: windows + needs: + - generate-windows-cuda + - generate-windows-rocm + - generate-windows-cpu + env: + KEY_CONTAINER: ${{ vars.KEY_CONTAINER }} + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: Set Version + shell: bash + run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV + - uses: 'google-github-actions/auth@v2' + with: + project_id: 'ollama' + credentials_json: '${{ secrets.GOOGLE_SIGNING_CREDENTIALS }}' + - run: echo "${{ vars.OLLAMA_CERT }}" > ollama_inc.crt + - name: install Windows SDK 8.1 to get signtool + run: | + $ErrorActionPreference = "Stop" + write-host "downloading SDK" + Invoke-WebRequest -Uri "https://go.microsoft.com/fwlink/p/?LinkId=323507" -OutFile "${env:RUNNER_TEMP}\sdksetup.exe" + Start-Process "${env:RUNNER_TEMP}\sdksetup.exe" -ArgumentList @("/q") -NoNewWindow -Wait + write-host "Win SDK 8.1 installed" + gci -path 'C:\Program Files (x86)\Windows Kits\' -r -fi 'signtool.exe' + - name: install signing plugin + run: | + $ErrorActionPreference = "Stop" + write-host "downloading plugin" + Invoke-WebRequest -Uri "https://github.com/GoogleCloudPlatform/kms-integrations/releases/download/cng-v1.0/kmscng-1.0-windows-amd64.zip" -OutFile "${env:RUNNER_TEMP}\plugin.zip" + Expand-Archive -Path "${env:RUNNER_TEMP}\plugin.zip" -DestinationPath ${env:RUNNER_TEMP}\plugin\ + write-host "Installing plugin" + & "${env:RUNNER_TEMP}\plugin\*\kmscng.msi" /quiet + write-host "plugin installed" + - uses: actions/setup-go@v5 + with: + go-version: "stable" + cache: true + - run: go get + - uses: actions/download-artifact@v4 + with: + name: generate-windows-cpu + - uses: actions/download-artifact@v4 + with: + name: generate-windows-cuda + - uses: actions/download-artifact@v4 + with: + name: windows-cuda-deps + - uses: actions/download-artifact@v4 + with: + name: windows-rocm-deps + - uses: actions/download-artifact@v4 + with: + name: generate-windows-rocm + - run: dir llm/build + - run: | + $gopath=(get-command go).source | split-path -parent + & "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1" + cd $env:GITHUB_WORKSPACE + $env:CMAKE_SYSTEM_VERSION="10.0.22621.0" + $env:PATH="$gopath;$env:PATH" + $env:OLLAMA_SKIP_GENERATE="1" + & .\scripts\build_windows.ps1 + - uses: actions/upload-artifact@v4 + with: + name: dist-windows + path: | + dist/OllamaSetup.exe + dist/ollama-windows-*.zip + + # Linux x86 assets built using the container based build + build-linux-amd64: + environment: release + runs-on: linux + env: + OLLAMA_SKIP_MANIFEST_CREATE: '1' + BUILD_ARCH: amd64 + PUSH: '1' + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: Set Version + shell: bash + run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ vars.DOCKER_USER }} + password: ${{ 
secrets.DOCKER_ACCESS_TOKEN }} + - run: | + ./scripts/build_linux.sh + ./scripts/build_docker.sh + mv dist/deps/* dist/ + - uses: actions/upload-artifact@v4 + with: + name: dist-linux-amd64 + path: | + dist/*linux* + !dist/*-cov + + # Linux ARM assets built using the container based build + # (at present, docker isn't pre-installed on arm ubunutu images) + build-linux-arm64: + environment: release + runs-on: linux-arm64 + env: + OLLAMA_SKIP_MANIFEST_CREATE: '1' + BUILD_ARCH: arm64 + PUSH: '1' + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: Set Version + shell: bash + run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV + - name: 'Install Docker' + run: | + # Add Docker's official GPG key: + env + uname -a + sudo apt-get update + sudo apt-get install -y ca-certificates curl + sudo install -m 0755 -d /etc/apt/keyrings + sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc + sudo chmod a+r /etc/apt/keyrings/docker.asc + + # Add the repository to Apt sources: + echo \ + "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \ + $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \ + sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + sudo apt-get update + sudo apt-get install -y docker-ce docker-ce-cli containerd.io + sudo usermod -aG docker $USER + sudo apt-get install acl + sudo setfacl --modify user:$USER:rw /var/run/docker.sock + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ vars.DOCKER_USER }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + - run: | + ./scripts/build_linux.sh + ./scripts/build_docker.sh + - uses: actions/upload-artifact@v4 + with: + name: dist-linux-arm64 + path: | + dist/*linux* + !dist/*-cov + + # Aggregate all the assets and ship a release + release: + needs: + - build-darwin + - build-windows + - build-linux-amd64 + - build-linux-arm64 + runs-on: linux + environment: release + permissions: + contents: write + env: + OLLAMA_SKIP_IMAGE_BUILD: '1' + PUSH: '1' + GH_TOKEN: ${{ github.token }} + steps: + - uses: actions/checkout@v4 + - name: Set Version + shell: bash + run: | + echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV + echo "RELEASE_VERSION=$(echo ${GITHUB_REF_NAME} | cut -f1 -d-)" >> $GITHUB_ENV + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ vars.DOCKER_USER }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + - run: ./scripts/build_docker.sh + - name: Retrieve built artifact + uses: actions/download-artifact@v4 + with: + path: dist + pattern: dist-* + merge-multiple: true + - run: | + ls -lh dist/ + (cd dist; sha256sum * > sha256sum.txt) + cat dist/sha256sum.txt + - name: Create or update Release + run: | + echo "Looking for existing release for ${{ env.RELEASE_VERSION }}" + OLD_TAG=$(gh release ls --json name,tagName | jq -r ".[] | select(.name == \"${{ env.RELEASE_VERSION }}\") | .tagName") + if [ -n "$OLD_TAG" ]; then + echo "Updating release ${{ env.RELEASE_VERSION }} to point to new tag ${GITHUB_REF_NAME}" + gh release edit ${OLD_TAG} --tag ${GITHUB_REF_NAME} + else + echo "Creating new release ${{ env.RELEASE_VERSION }} pointing to tag ${GITHUB_REF_NAME}" + gh release create ${GITHUB_REF_NAME} \ + --title ${{ env.RELEASE_VERSION }} \ + --draft \ + --generate-notes \ + --prerelease + fi + echo "Uploading artifacts for tag ${GITHUB_REF_NAME}" + gh release upload ${GITHUB_REF_NAME} dist/* --clobber diff --git 
a/ollama/.github/workflows/test.yaml b/ollama/.github/workflows/test.yaml new file mode 100755 index 0000000000000000000000000000000000000000..a57d45fd7fd71318033ad09fc8e207375c36c863 --- /dev/null +++ b/ollama/.github/workflows/test.yaml @@ -0,0 +1,323 @@ +name: test + +concurrency: + # For PRs, later CI runs preempt previous ones. e.g. a force push on a PR + # cancels running CI jobs and starts all new ones. + # + # For non-PR pushes, concurrency.group needs to be unique for every distinct + # CI run we want to have happen. Use run_id, which in practice means all + # non-PR CI runs will be allowed to run without preempting each other. + group: ${{ github.workflow }}-$${{ github.pull_request.number || github.run_id }} + cancel-in-progress: true + +on: + pull_request: + paths: + - '**/*' + - '!docs/**' + - '!README.md' + +jobs: + changes: + runs-on: ubuntu-latest + outputs: + GENERATE: ${{ steps.changes.outputs.GENERATE }} + GENERATE_CUDA: ${{ steps.changes.outputs.GENERATE_CUDA }} + GENERATE_ROCM: ${{ steps.changes.outputs.GENERATE_ROCM }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - id: changes + run: | + changed() { + git diff-tree -r --no-commit-id --name-only \ + $(git merge-base ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }}) \ + ${{ github.event.pull_request.head.sha }} \ + | xargs python3 -c "import sys; from pathlib import Path; print(any(Path(x).match(glob) for x in sys.argv[1:] for glob in '$*'.split(' ')))" + } + + { + echo GENERATE=$(changed 'llm/llama.cpp' 'llm/patches/**' 'llm/ext_server/**' 'llm/generate/**') + echo GENERATE_CUDA=$(changed 'llm/llama.cpp' 'llm/patches/**' 'llm/ext_server/**' 'llm/generate/**') + echo GENERATE_ROCM=$(changed 'llm/llama.cpp' 'llm/patches/**' 'llm/ext_server/**' 'llm/generate/**') + } >>$GITHUB_OUTPUT + + generate: + needs: [changes] + if: ${{ needs.changes.outputs.GENERATE == 'True' }} + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-2019] + arch: [amd64, arm64] + exclude: + - os: ubuntu-latest + arch: arm64 + - os: windows-2019 + arch: arm64 + runs-on: ${{ matrix.os }} + env: + GOARCH: ${{ matrix.arch }} + CGO_ENABLED: '1' + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: "stable" + cache: true + - run: go get ./... + - run: | + $gopath=(get-command go).source | split-path -parent + $gccpath=(get-command gcc).source | split-path -parent + & "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1" + cd $env:GITHUB_WORKSPACE + $env:CMAKE_SYSTEM_VERSION="10.0.22621.0" + $env:PATH="$gopath;$gccpath;$env:PATH" + echo $env:PATH + go generate -x ./... + if: ${{ startsWith(matrix.os, 'windows-') }} + name: 'Windows Go Generate' + - run: go generate -x ./... + if: ${{ ! startsWith(matrix.os, 'windows-') }} + name: 'Unix Go Generate' + - run: go build . 
+ - uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.os }}-${{ matrix.arch }}-libraries + path: | + llm/build/**/bin/* + llm/build/**/*.a + generate-cuda: + needs: [changes] + if: ${{ needs.changes.outputs.GENERATE_CUDA == 'True' }} + strategy: + matrix: + cuda-version: + - '11.8.0' + runs-on: linux + container: nvidia/cuda:${{ matrix.cuda-version }}-devel-ubuntu20.04 + steps: + - run: | + apt-get update && apt-get install -y git build-essential curl + curl -fsSL https://github.com/Kitware/CMake/releases/download/v3.28.1/cmake-3.28.1-linux-x86_64.tar.gz \ + | tar -zx -C /usr --strip-components 1 + env: + DEBIAN_FRONTEND: noninteractive + - uses: actions/checkout@v4 + - uses: actions/setup-go@v4 + with: + go-version-file: go.mod + cache: true + - run: go get ./... + - run: | + git config --global --add safe.directory /__w/ollama/ollama + go generate -x ./... + env: + OLLAMA_SKIP_CPU_GENERATE: '1' + - uses: actions/upload-artifact@v4 + with: + name: cuda-${{ matrix.cuda-version }}-libraries + path: | + llm/build/**/bin/* + dist/windows-amd64/** + generate-rocm: + needs: [changes] + if: ${{ needs.changes.outputs.GENERATE_ROCM == 'True' }} + strategy: + matrix: + rocm-version: + - '6.1.2' + runs-on: linux + container: rocm/dev-ubuntu-20.04:${{ matrix.rocm-version }} + steps: + - run: | + apt-get update && apt-get install -y git build-essential curl rocm-libs + curl -fsSL https://github.com/Kitware/CMake/releases/download/v3.28.1/cmake-3.28.1-linux-x86_64.tar.gz \ + | tar -zx -C /usr --strip-components 1 + env: + DEBIAN_FRONTEND: noninteractive + - uses: actions/checkout@v4 + - uses: actions/setup-go@v4 + with: + go-version-file: go.mod + cache: true + - run: go get ./... + - run: | + git config --global --add safe.directory /__w/ollama/ollama + go generate -x ./... + env: + OLLAMA_SKIP_CPU_GENERATE: '1' + - uses: actions/upload-artifact@v4 + with: + name: rocm-${{ matrix.rocm-version }}-libraries + path: | + llm/build/**/bin/* + dist/windows-amd64/** + + # ROCm generation step + generate-windows-rocm: + needs: [changes] + if: ${{ needs.changes.outputs.GENERATE_ROCM == 'True' }} + runs-on: windows + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: "stable" + cache: true + - name: 'Install ROCm' + run: | + $ErrorActionPreference = "Stop" + write-host "downloading AMD HIP Installer" + Invoke-WebRequest -Uri "https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-24.Q3-WinSvr2022-For-HIP.exe" -OutFile "${env:RUNNER_TEMP}\rocm-install.exe" + write-host "Installing AMD HIP" + Start-Process "${env:RUNNER_TEMP}\rocm-install.exe" -ArgumentList '-install' -NoNewWindow -Wait + write-host "Completed AMD HIP" + - name: 'Verify ROCm' + run: | + & 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' --version + - run: go get ./... + - run: | + $gopath=(get-command go).source | split-path -parent + & "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1" + cd $env:GITHUB_WORKSPACE + $env:CMAKE_SYSTEM_VERSION="10.0.22621.0" + $env:PATH="$gopath;$env:PATH" + $env:OLLAMA_SKIP_CPU_GENERATE="1" + $env:HIP_PATH=$(Resolve-Path 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' | split-path | split-path) + go generate -x ./... + name: go generate + env: + OLLAMA_SKIP_CPU_GENERATE: '1' + # TODO - do we need any artifacts? 
+ + # CUDA generation step + generate-windows-cuda: + needs: [changes] + if: ${{ needs.changes.outputs.GENERATE_CUDA == 'True' }} + runs-on: windows + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: "stable" + cache: true + - name: 'Install CUDA' + run: | + $ErrorActionPreference = "Stop" + write-host "downloading CUDA Installer" + Invoke-WebRequest -Uri "https://developer.download.nvidia.com/compute/cuda/11.3.1/local_installers/cuda_11.3.1_465.89_win10.exe" -OutFile "${env:RUNNER_TEMP}\cuda-install.exe" + write-host "Installing CUDA" + Start-Process "${env:RUNNER_TEMP}\cuda-install.exe" -ArgumentList '-s' -NoNewWindow -Wait + write-host "Completed CUDA" + $cudaPath=((resolve-path "c:\Program Files\NVIDIA*\CUDA\v*\bin\nvcc.exe")[0].path | split-path | split-path) + $cudaVer=($cudaPath | split-path -leaf ) -replace 'v(\d+).(\d+)', '$1_$2' + echo "$cudaPath\bin" >> $env:GITHUB_PATH + echo "CUDA_PATH=$cudaPath" >> $env:GITHUB_ENV + echo "CUDA_PATH_V${cudaVer}=$cudaPath" >> $env:GITHUB_ENV + echo "CUDA_PATH_VX_Y=CUDA_PATH_V${cudaVer}" >> $env:GITHUB_ENV + - name: 'Verify CUDA' + run: nvcc -V + - run: go get ./... + - name: go generate + run: | + $gopath=(get-command go).source | split-path -parent + $cudabin=(get-command nvcc).source | split-path + & "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1" + cd $env:GITHUB_WORKSPACE + $env:CMAKE_SYSTEM_VERSION="10.0.22621.0" + $env:PATH="$gopath;$cudabin;$env:PATH" + $env:OLLAMA_SKIP_CPU_GENERATE="1" + go generate -x ./... + env: + OLLAMA_SKIP_CPU_GENERATE: '1' + # TODO - do we need any artifacts? + + lint: + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-2019] + arch: [amd64, arm64] + exclude: + - os: ubuntu-latest + arch: arm64 + - os: windows-2019 + arch: arm64 + - os: macos-latest + arch: amd64 + runs-on: ${{ matrix.os }} + env: + GOARCH: ${{ matrix.arch }} + CGO_ENABLED: '1' + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - uses: actions/setup-go@v5 + with: + go-version: "stable" + cache: false + - run: | + case ${{ matrix.arch }} in + amd64) echo ARCH=x86_64 ;; + arm64) echo ARCH=arm64 ;; + esac >>$GITHUB_ENV + shell: bash + - run: | + mkdir -p llm/build/linux/$ARCH/stub/bin + touch llm/build/linux/$ARCH/stub/bin/ollama_llama_server + if: ${{ startsWith(matrix.os, 'ubuntu-') }} + - run: | + mkdir -p llm/build/darwin/$ARCH/stub/bin + touch llm/build/darwin/$ARCH/stub/bin/ollama_llama_server + if: ${{ startsWith(matrix.os, 'macos-') }} + - uses: golangci/golangci-lint-action@v6 + with: + args: --timeout 8m0s -v + test: + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-2019] + arch: [amd64] + exclude: + - os: ubuntu-latest + arch: arm64 + - os: windows-2019 + arch: arm64 + runs-on: ${{ matrix.os }} + env: + GOARCH: ${{ matrix.arch }} + CGO_ENABLED: '1' + OLLAMA_CPU_TARGET: 'static' + OLLAMA_SKIP_CPU_GENERATE: '1' + OLLAMA_SKIP_METAL_GENERATE: '1' + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - uses: actions/setup-go@v5 + with: + go-version: "stable" + cache: true + - run: | + case ${{ matrix.arch }} in + amd64) echo ARCH=x86_64 ;; + arm64) echo ARCH=arm64 ;; + esac >>$GITHUB_ENV + shell: bash + - run: | + mkdir -p llm/build/linux/$ARCH/stub/bin + touch llm/build/linux/$ARCH/stub/bin/ollama_llama_server + if: ${{ startsWith(matrix.os, 'ubuntu-') }} + - run: | + mkdir -p llm/build/darwin/$ARCH/stub/bin + touch llm/build/darwin/$ARCH/stub/bin/ollama_llama_server + if: ${{ 
startsWith(matrix.os, 'macos-') }} + shell: bash + - run: go generate ./... + - run: go build + - run: go test -v ./... + - uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.os }}-binaries + path: ollama diff --git a/ollama/.gitignore b/ollama/.gitignore new file mode 100755 index 0000000000000000000000000000000000000000..0d826ab614dd419d8ff5f7c1f2e0d41445e64494 --- /dev/null +++ b/ollama/.gitignore @@ -0,0 +1,15 @@ +.DS_Store +.vscode +.env +.venv +.swp +dist +ollama +ggml-metal.metal +.cache +*.exe +.idea +test_data +*.crt +llm/build +__debug_bin* \ No newline at end of file diff --git a/ollama/.gitmodules b/ollama/.gitmodules new file mode 100755 index 0000000000000000000000000000000000000000..b92f645d75688e9e8fd5a5f3b5ce5ceda53efef9 --- /dev/null +++ b/ollama/.gitmodules @@ -0,0 +1,4 @@ +[submodule "llama.cpp"] + path = llm/llama.cpp + url = https://github.com/ggerganov/llama.cpp.git + shallow = true \ No newline at end of file diff --git a/ollama/.golangci.yaml b/ollama/.golangci.yaml new file mode 100755 index 0000000000000000000000000000000000000000..c2c8b52b2e24a94dbd0d0f42dbcb2252e50bb779 --- /dev/null +++ b/ollama/.golangci.yaml @@ -0,0 +1,44 @@ +run: + timeout: 5m +linters: + enable: + - asasalint + - bidichk + - bodyclose + - containedctx + - contextcheck + - errcheck + - exportloopref + - gci + - gocheckcompilerdirectives + - gofmt + - gofumpt + - gosimple + - govet + - ineffassign + - intrange + - makezero + - misspell + - nilerr + - nolintlint + - nosprintfhostport + - staticcheck + - tenv + - testifylint + - unconvert + - unused + - usestdlibvars + - wastedassign + - whitespace +linters-settings: + gci: + sections: [standard, default, localmodule] +severity: + default-severity: error + rules: + - linters: + - gofmt + - goimports + - intrange + - usestdlibvars + severity: info diff --git a/ollama/.prettierrc.json b/ollama/.prettierrc.json new file mode 100755 index 0000000000000000000000000000000000000000..0b3312d56f8c3b67feaa22d84065c7ec59c91d37 --- /dev/null +++ b/ollama/.prettierrc.json @@ -0,0 +1,10 @@ +{ + "trailingComma": "es5", + "tabWidth": 2, + "useTabs": false, + "semi": false, + "singleQuote": true, + "jsxSingleQuote": true, + "printWidth": 120, + "arrowParens": "avoid" +} diff --git a/ollama/Dockerfile b/ollama/Dockerfile new file mode 100755 index 0000000000000000000000000000000000000000..c8efdd8a27621a8662770e974e1bc756f1f64aa9 --- /dev/null +++ b/ollama/Dockerfile @@ -0,0 +1,144 @@ +ARG GOLANG_VERSION=1.22.5 +ARG CMAKE_VERSION=3.22.1 +# this CUDA_VERSION corresponds with the one specified in docs/gpu.md +ARG CUDA_VERSION=11.3.1 +ARG ROCM_VERSION=6.1.2 + +# Copy the minimal context we need to run the generate scripts +FROM scratch AS llm-code +COPY .git .git +COPY .gitmodules .gitmodules +COPY llm llm + +FROM --platform=linux/amd64 nvidia/cuda:$CUDA_VERSION-devel-centos7 AS cuda-build-amd64 +ARG CMAKE_VERSION +COPY ./scripts/rh_linux_deps.sh / +RUN CMAKE_VERSION=${CMAKE_VERSION} sh /rh_linux_deps.sh +ENV PATH /opt/rh/devtoolset-10/root/usr/bin:$PATH +COPY --from=llm-code / /go/src/github.com/ollama/ollama/ +WORKDIR /go/src/github.com/ollama/ollama/llm/generate +ARG CGO_CFLAGS +RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_SKIP_CPU_GENERATE=1 sh gen_linux.sh + +FROM --platform=linux/arm64 nvidia/cuda:$CUDA_VERSION-devel-rockylinux8 AS cuda-build-arm64 +ARG CMAKE_VERSION +COPY ./scripts/rh_linux_deps.sh / +RUN CMAKE_VERSION=${CMAKE_VERSION} sh /rh_linux_deps.sh +ENV PATH /opt/rh/gcc-toolset-10/root/usr/bin:$PATH +COPY --from=llm-code / 
/go/src/github.com/ollama/ollama/ +WORKDIR /go/src/github.com/ollama/ollama/llm/generate +ARG CGO_CFLAGS +RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_SKIP_CPU_GENERATE=1 sh gen_linux.sh + +FROM --platform=linux/amd64 rocm/dev-centos-7:${ROCM_VERSION}-complete AS rocm-build-amd64 +ARG CMAKE_VERSION +COPY ./scripts/rh_linux_deps.sh / +RUN CMAKE_VERSION=${CMAKE_VERSION} sh /rh_linux_deps.sh +ENV PATH /opt/rh/devtoolset-10/root/usr/bin:$PATH +ENV LIBRARY_PATH /opt/amdgpu/lib64 +COPY --from=llm-code / /go/src/github.com/ollama/ollama/ +WORKDIR /go/src/github.com/ollama/ollama/llm/generate +ARG CGO_CFLAGS +ARG AMDGPU_TARGETS +RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_SKIP_CPU_GENERATE=1 sh gen_linux.sh +RUN mkdir /tmp/scratch && \ + for dep in $(zcat /go/src/github.com/ollama/ollama/llm/build/linux/x86_64/rocm*/bin/deps.txt.gz) ; do \ + cp ${dep} /tmp/scratch/ || exit 1 ; \ + done && \ + (cd /opt/rocm/lib && tar cf - rocblas/library) | (cd /tmp/scratch/ && tar xf - ) && \ + mkdir -p /go/src/github.com/ollama/ollama/dist/deps/ && \ + (cd /tmp/scratch/ && tar czvf /go/src/github.com/ollama/ollama/dist/deps/ollama-linux-amd64-rocm.tgz . ) + + +FROM --platform=linux/amd64 centos:7 AS cpu-builder-amd64 +ARG CMAKE_VERSION +ARG GOLANG_VERSION +COPY ./scripts/rh_linux_deps.sh / +RUN CMAKE_VERSION=${CMAKE_VERSION} GOLANG_VERSION=${GOLANG_VERSION} sh /rh_linux_deps.sh +ENV PATH /opt/rh/devtoolset-10/root/usr/bin:$PATH +COPY --from=llm-code / /go/src/github.com/ollama/ollama/ +ARG OLLAMA_CUSTOM_CPU_DEFS +ARG CGO_CFLAGS +WORKDIR /go/src/github.com/ollama/ollama/llm/generate + +FROM --platform=linux/amd64 cpu-builder-amd64 AS static-build-amd64 +RUN OLLAMA_CPU_TARGET="static" sh gen_linux.sh +FROM --platform=linux/amd64 cpu-builder-amd64 AS cpu-build-amd64 +RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_CPU_TARGET="cpu" sh gen_linux.sh +FROM --platform=linux/amd64 cpu-builder-amd64 AS cpu_avx-build-amd64 +RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_CPU_TARGET="cpu_avx" sh gen_linux.sh +FROM --platform=linux/amd64 cpu-builder-amd64 AS cpu_avx2-build-amd64 +RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_CPU_TARGET="cpu_avx2" sh gen_linux.sh + +FROM --platform=linux/arm64 rockylinux:8 AS cpu-builder-arm64 +ARG CMAKE_VERSION +ARG GOLANG_VERSION +COPY ./scripts/rh_linux_deps.sh / +RUN CMAKE_VERSION=${CMAKE_VERSION} GOLANG_VERSION=${GOLANG_VERSION} sh /rh_linux_deps.sh +ENV PATH /opt/rh/gcc-toolset-10/root/usr/bin:$PATH +COPY --from=llm-code / /go/src/github.com/ollama/ollama/ +ARG OLLAMA_CUSTOM_CPU_DEFS +ARG CGO_CFLAGS +WORKDIR /go/src/github.com/ollama/ollama/llm/generate + +FROM --platform=linux/arm64 cpu-builder-arm64 AS static-build-arm64 +RUN OLLAMA_CPU_TARGET="static" sh gen_linux.sh +FROM --platform=linux/arm64 cpu-builder-arm64 AS cpu-build-arm64 +RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_CPU_TARGET="cpu" sh gen_linux.sh + + +# Intermediate stage used for ./scripts/build_linux.sh +FROM --platform=linux/amd64 cpu-build-amd64 AS build-amd64 +ENV CGO_ENABLED 1 +WORKDIR /go/src/github.com/ollama/ollama +COPY . . 
+COPY --from=static-build-amd64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/ +COPY --from=cpu_avx-build-amd64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/ +COPY --from=cpu_avx2-build-amd64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/ +COPY --from=cuda-build-amd64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/ +COPY --from=rocm-build-amd64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/ +COPY --from=rocm-build-amd64 /go/src/github.com/ollama/ollama/dist/deps/ ./dist/deps/ +ARG GOFLAGS +ARG CGO_CFLAGS +RUN go build -trimpath . + +# Intermediate stage used for ./scripts/build_linux.sh +FROM --platform=linux/arm64 cpu-build-arm64 AS build-arm64 +ENV CGO_ENABLED 1 +ARG GOLANG_VERSION +WORKDIR /go/src/github.com/ollama/ollama +COPY . . +COPY --from=static-build-arm64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/ +COPY --from=cuda-build-arm64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/ +ARG GOFLAGS +ARG CGO_CFLAGS +RUN go build -trimpath . + +# Runtime stages +FROM --platform=linux/amd64 ubuntu:22.04 as runtime-amd64 +RUN apt-get update && apt-get install -y ca-certificates +COPY --from=build-amd64 /go/src/github.com/ollama/ollama/ollama /bin/ollama +FROM --platform=linux/arm64 ubuntu:22.04 as runtime-arm64 +RUN apt-get update && apt-get install -y ca-certificates +COPY --from=build-arm64 /go/src/github.com/ollama/ollama/ollama /bin/ollama + +# Radeon images are much larger so we keep it distinct from the CPU/CUDA image +FROM --platform=linux/amd64 rocm/dev-centos-7:${ROCM_VERSION}-complete as runtime-rocm +RUN update-pciids +COPY --from=build-amd64 /go/src/github.com/ollama/ollama/ollama /bin/ollama +EXPOSE 11434 +ENV OLLAMA_HOST 0.0.0.0 + +ENTRYPOINT ["/bin/ollama"] +CMD ["serve"] + +FROM runtime-$TARGETARCH +EXPOSE 11434 +ENV OLLAMA_HOST 0.0.0.0 +ENV PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin +ENV LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 +ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility +ENV NVIDIA_VISIBLE_DEVICES=all + +ENTRYPOINT ["/bin/ollama"] +CMD ["serve"] diff --git a/ollama/LICENSE b/ollama/LICENSE new file mode 100755 index 0000000000000000000000000000000000000000..8e3dc978a7ca8c53f56bbedc5b558116140fc02e --- /dev/null +++ b/ollama/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) Ollama + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/ollama/README.md b/ollama/README.md new file mode 100755 index 0000000000000000000000000000000000000000..aae92e6c2a841b26eb08f83d808f828f22a9944c --- /dev/null +++ b/ollama/README.md @@ -0,0 +1,409 @@ +
{{ .Prompt }} {{ .Suffix }}
+```
+
+> [!NOTE]
+> The CodeLlama 34B and 70B code completion models, as well as all instruct and Python fine-tuned models, do not support fill-in-middle.
+
+#### Codestral
+
+Codestral [22B](https://ollama.com/library/codestral:22b) supports fill-in-middle.
+
+```gotmpl
+[SUFFIX]{{ .Suffix }}[PREFIX] {{ .Prompt }}
+```
diff --git a/ollama/docs/troubleshooting.md b/ollama/docs/troubleshooting.md
new file mode 100755
index 0000000000000000000000000000000000000000..589061a89a8d0137be0861fcabaedebb0d9a2e1a
--- /dev/null
+++ b/ollama/docs/troubleshooting.md
@@ -0,0 +1,96 @@
+# How to troubleshoot issues
+
+Sometimes Ollama may not perform as expected. One of the best ways to figure out what happened is to take a look at the logs. Find the logs on **Mac** by running the command:
+
+```shell
+cat ~/.ollama/logs/server.log
+```
+
+On **Linux** systems with systemd, the logs can be found with this command:
+
+```shell
+journalctl -u ollama --no-pager
+```
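+
+If you are reproducing an issue, it can also help to follow the log live while it happens (a small variation on the command above):
+
+```shell
+journalctl -u ollama -f
+```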
+
+When you run Ollama in a **container**, the logs go to stdout/stderr in the container:
+
+```shell
+docker logs <container-name>
+```
+(Use `docker ps` to find the container name)
+
+If you are manually running `ollama serve` in a terminal, the logs will appear in that terminal.
+
+When you run Ollama on **Windows**, there are a few different locations. You can view them in an Explorer window by pressing `Win+R` and typing in:
+- `explorer %LOCALAPPDATA%\Ollama` to view logs. The most recent server logs will be in `server.log` and older logs will be in `server-#.log` (a PowerShell snippet for viewing the latest entries follows this list)
+- `explorer %LOCALAPPDATA%\Programs\Ollama` to browse the binaries (The installer adds this to your user PATH)
+- `explorer %HOMEPATH%\.ollama` to browse where models and configuration are stored
+- `explorer %TEMP%` where temporary executable files are stored in one or more `ollama*` directories
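+
+For a quick look at the most recent server log entries from PowerShell (a minimal sketch; the path assumes the default install location):
+
+```powershell
+Get-Content "$env:LOCALAPPDATA\Ollama\server.log" -Tail 50
+```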
+
+To enable additional debug logging to help troubleshoot problems, first **Quit the running app from the tray menu**, then run the following in a PowerShell terminal:
+```powershell
+$env:OLLAMA_DEBUG="1"
+& "ollama app.exe"
+```
+
+Join the [Discord](https://discord.gg/ollama) for help interpreting the logs.
+
+## LLM libraries
+
+Ollama includes multiple LLM libraries compiled for different GPUs and CPU vector features. Ollama tries to pick the best one based on the capabilities of your system. If this autodetection has problems, or you run into other problems (e.g. crashes in your GPU), you can work around this by forcing a specific LLM library. `cpu_avx2` will perform the best, followed by `cpu_avx`; the slowest but most compatible is `cpu`. Rosetta emulation under macOS will work with the `cpu` library.
+
+In the server log, you will see a message that looks something like this (varies from release to release):
+
+```
+Dynamic LLM libraries [rocm_v6 cpu cpu_avx cpu_avx2 cuda_v11 rocm_v5]
+```
+
+**Experimental LLM Library Override**
+
+You can set `OLLAMA_LLM_LIBRARY` to any of the available LLM libraries to bypass autodetection. For example, if you have a CUDA card but want to force the CPU LLM library with AVX2 vector support, use:
+
+```
+OLLAMA_LLM_LIBRARY="cpu_avx2" ollama serve
+```
+
+You can see what features your CPU has with the following command:
+```
+cat /proc/cpuinfo | grep flags | head -1
+```
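+
+To check specifically for the AVX and AVX2 features used by the `cpu_avx` and `cpu_avx2` libraries (a small optional check):
+```
+grep -o -w -e avx -e avx2 /proc/cpuinfo | sort -u
+```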
+
+## Installing older or pre-release versions on Linux
+
+If you run into problems on Linux and want to install an older version, or you'd like to try out a pre-release before it's officially released, you can tell the install script which version to install.
+
+```sh
+curl -fsSL https://ollama.com/install.sh | OLLAMA_VERSION="0.1.29" sh
+```
+
+## Linux tmp noexec
+
+If your system is configured with the "noexec" flag where Ollama stores its temporary executable files, you can specify an alternate location by setting `OLLAMA_TMPDIR` to a location writable by the user Ollama runs as, for example `OLLAMA_TMPDIR=/usr/share/ollama/`.
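+
+On a systemd-managed install, one way to set this is with a service override (a sketch; the directory is only an example and must exist and be writable by the user the service runs as):
+
+```shell
+# Open an editor to add a drop-in override for the ollama service
+sudo systemctl edit ollama.service
+
+# In the override, add:
+#   [Service]
+#   Environment="OLLAMA_TMPDIR=/usr/share/ollama/"
+
+# Apply the change
+sudo systemctl daemon-reload
+sudo systemctl restart ollama
+```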
+
+## NVIDIA GPU Discovery
+
+When Ollama starts up, it takes inventory of the GPUs present in the system to determine compatibility and how much VRAM is available. Sometimes this discovery can fail to find your GPUs. In general, running the latest driver will yield the best results.
+
+### Linux NVIDIA Troubleshooting
+
+If you are using a container to run Ollama, make sure you've set up the container runtime first as described in [docker.md](./docker.md).
+
+Sometimes Ollama can have difficulties initializing the GPU. When you check the server logs, this can show up as various error codes, such as "3" (not initialized), "46" (device unavailable), "100" (no device), "999" (unknown), or others. The following troubleshooting techniques may help resolve the problem:
+
+- If you are using a container, is the container runtime working? Try `docker run --gpus all ubuntu nvidia-smi` - if this doesn't work, Ollama won't be able to see your NVIDIA GPU.
+- Is the uvm driver loaded? `sudo nvidia-modprobe -u`
+- Try reloading the nvidia_uvm driver - `sudo rmmod nvidia_uvm` then `sudo modprobe nvidia_uvm`
+- Try rebooting
+- Make sure you're running the latest NVIDIA drivers
+
+If none of those resolve the problem, gather additional information and file an issue (a sketch of these commands follows the list):
+- Set `CUDA_ERROR_LEVEL=50` and try again to get more diagnostic logs
+- Check dmesg for any errors `sudo dmesg | grep -i nvrm` and `sudo dmesg | grep -i nvidia`
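+
+For example, a minimal sketch of collecting this information from a terminal (the log file name is just an example):
+
+```shell
+# Run the server with extra CUDA diagnostics and keep a copy of the output
+CUDA_ERROR_LEVEL=50 ollama serve 2>&1 | tee ollama-debug.log
+
+# Check the kernel log for NVIDIA driver errors
+sudo dmesg | grep -i nvrm
+sudo dmesg | grep -i nvidia
+```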
+
+
+## Windows Terminal Errors
+
+Older versions of Windows 10 (e.g., 21H1) are known to have a bug where the standard terminal program does not display control characters correctly. This can result in long strings of control sequences like `←[?25h←[?25l` being displayed, sometimes erroring with `The parameter is incorrect`. To resolve this problem, please update to Windows 10 22H1 or newer.
diff --git a/ollama/docs/tutorials.md b/ollama/docs/tutorials.md
new file mode 100755
index 0000000000000000000000000000000000000000..0f520c95241905900239136bc324772222e24df1
--- /dev/null
+++ b/ollama/docs/tutorials.md
@@ -0,0 +1,9 @@
+# Tutorials
+
+Here is a list of ways you can use Ollama with other tools to build interesting applications.
+
+- [Using LangChain with Ollama in JavaScript](./tutorials/langchainjs.md)
+- [Using LangChain with Ollama in Python](./tutorials/langchainpy.md)
+- [Running Ollama on NVIDIA Jetson Devices](./tutorials/nvidia-jetson.md)
+
+Also be sure to check out the [examples](../examples) directory for more ways to use Ollama.
diff --git a/ollama/docs/tutorials/fly-gpu.md b/ollama/docs/tutorials/fly-gpu.md
new file mode 100755
index 0000000000000000000000000000000000000000..24802ddb1ee88c8944b6612891de02b952ebd128
--- /dev/null
+++ b/ollama/docs/tutorials/fly-gpu.md
@@ -0,0 +1,83 @@
+# Running Ollama on Fly.io GPU Instances
+
+Ollama runs with little to no configuration on [Fly.io GPU instances](https://fly.io/docs/gpus/gpu-quickstart/). If you don't have access to GPUs yet, you'll need to [apply for access](https://fly.io/gpu/) on the waitlist. Once you're accepted, you'll get an email with instructions on how to get started.
+
+Create a new app with `fly apps create`:
+
+```bash
+fly apps create
+```
+
+Then create a `fly.toml` file in a new folder that looks like this:
+
+```toml
+app = "sparkling-violet-709"
+primary_region = "ord"
+vm.size = "a100-40gb" # see https://fly.io/docs/gpus/gpu-quickstart/ for more info
+
+[build]
+ image = "ollama/ollama"
+
+[http_service]
+ internal_port = 11434
+ force_https = false
+ auto_stop_machines = true
+ auto_start_machines = true
+ min_machines_running = 0
+ processes = ["app"]
+
+[mounts]
+ source = "models"
+ destination = "/root/.ollama"
+ initial_size = "100gb"
+```
+
+Then create a [new private IPv6 address](https://fly.io/docs/reference/private-networking/#flycast-private-load-balancing) for your app:
+
+```bash
+fly ips allocate-v6 --private
+```
+
+Then deploy your app:
+
+```bash
+fly deploy
+```
+
+And finally you can access it interactively with a new Fly.io Machine:
+
+```bash
+fly machine run -e OLLAMA_HOST=http://your-app-name.flycast --shell ollama/ollama
+```
+
+```bash
+$ ollama run openchat:7b-v3.5-fp16
+>>> How do I bake chocolate chip cookies?
+ To bake chocolate chip cookies, follow these steps:
+
+1. Preheat the oven to 375°F (190°C) and line a baking sheet with parchment paper or silicone baking mat.
+
+2. In a large bowl, mix together 1 cup of unsalted butter (softened), 3/4 cup granulated sugar, and 3/4
+cup packed brown sugar until light and fluffy.
+
+3. Add 2 large eggs, one at a time, to the butter mixture, beating well after each addition. Stir in 1
+teaspoon of pure vanilla extract.
+
+4. In a separate bowl, whisk together 2 cups all-purpose flour, 1/2 teaspoon baking soda, and 1/2 teaspoon
+salt. Gradually add the dry ingredients to the wet ingredients, stirring until just combined.
+
+5. Fold in 2 cups of chocolate chips (or chunks) into the dough.
+
+6. Drop rounded tablespoons of dough onto the prepared baking sheet, spacing them about 2 inches apart.
+
+7. Bake for 10-12 minutes, or until the edges are golden brown. The centers should still be slightly soft.
+
+8. Allow the cookies to cool on the baking sheet for a few minutes before transferring them to a wire rack
+to cool completely.
+
+Enjoy your homemade chocolate chip cookies!
+```
+
+When you set it up like this, it will automatically turn off when you're done using it. Then when you access it again, it will automatically turn back on. This is a great way to save money on GPU instances when you're not using them. If you want a persistent wake-on-use connection to your Ollama instance, you can set up a [connection to your Fly network using WireGuard](https://fly.io/docs/reference/private-networking/#discovering-apps-through-dns-on-a-wireguard-connection). Then you can access your Ollama instance at `http://your-app-name.flycast`.
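+
+Once you're connected to the app's private network (for example over WireGuard), you can also call the API directly. A quick sketch using the placeholder app name from the `fly.toml` above:
+
+```bash
+curl http://your-app-name.flycast/api/generate -d '{
+  "model": "openchat:7b-v3.5-fp16",
+  "prompt": "Why is the sky blue?"
+}'
+```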
+
+And that's it!
diff --git a/ollama/docs/tutorials/langchainjs.md b/ollama/docs/tutorials/langchainjs.md
new file mode 100755
index 0000000000000000000000000000000000000000..f925869b5f840099aaa722e85fa83e8422c6255a
--- /dev/null
+++ b/ollama/docs/tutorials/langchainjs.md
@@ -0,0 +1,77 @@
+# Using LangChain with Ollama using JavaScript
+
+In this tutorial, we are going to use JavaScript with LangChain and Ollama to learn about something just a touch more recent. In August 2023, there was a series of wildfires on Maui. There is no way an LLM trained before that time can know about this, since their training data would not include anything as recent as that. So we can find the [Wikipedia article about the fires](https://en.wikipedia.org/wiki/2023_Hawaii_wildfires) and ask questions about the contents.
+
+To get started, let's just use **LangChain** to ask a simple question to a model. To do this with JavaScript, we need to install **LangChain**:
+
+```bash
+npm install @langchain/community
+```
+
+Now we can start building out our JavaScript:
+
+```javascript
+import { Ollama } from "@langchain/community/llms/ollama";
+
+const ollama = new Ollama({
+ baseUrl: "http://localhost:11434",
+ model: "llama3.1",
+});
+
+const answer = await ollama.invoke(`why is the sky blue?`);
+
+console.log(answer);
+```
+
+That will get us the same thing as if we ran `ollama run llama3.1 "why is the sky blue"` in the terminal. But we want to load a document from the web to ask a question against. **Cheerio** is a great library for ingesting a webpage, and **LangChain** uses it in their **CheerioWebBaseLoader**. So let's install **Cheerio** and build that part of the app.
+
+```bash
+npm install cheerio
+```
+
+```javascript
+import { CheerioWebBaseLoader } from "langchain/document_loaders/web/cheerio";
+
+const loader = new CheerioWebBaseLoader("https://en.wikipedia.org/wiki/2023_Hawaii_wildfires");
+const data = await loader.load();
+```
+
+That will load the document. Although this page is smaller than the Odyssey, it is certainly bigger than the context size for most LLMs. So we are going to need to split it into smaller pieces, and then select just the pieces relevant to our question. This is a great use for a vector datastore. In this example, we will use the **MemoryVectorStore** that is part of **LangChain**. But there is one more thing we need to get the content into the datastore. We have to run an embeddings process that converts the tokens in the text into a series of vectors. And for that, we are going to use **Tensorflow**. There is a lot of stuff going on in this one. First, install the **Tensorflow** components that we need.
+
+```bash
+npm install @tensorflow/tfjs-core@3.6.0 @tensorflow/tfjs-converter@3.6.0 @tensorflow-models/universal-sentence-encoder@1.3.3 @tensorflow/tfjs-node@4.10.0
+```
+
+If you just install those components without the version numbers, it will install the latest versions, but there are conflicts within **Tensorflow**, so you need to install the compatible versions.
+
+```javascript
+import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"
+import { MemoryVectorStore } from "langchain/vectorstores/memory";
+import "@tensorflow/tfjs-node";
+import { TensorFlowEmbeddings } from "langchain/embeddings/tensorflow";
+
+// Split the text into 500 character chunks. And overlap each chunk by 20 characters
+const textSplitter = new RecursiveCharacterTextSplitter({
+ chunkSize: 500,
+ chunkOverlap: 20
+});
+const splitDocs = await textSplitter.splitDocuments(data);
+
+// Then use the TensorFlow Embedding to store these chunks in the datastore
+const vectorStore = await MemoryVectorStore.fromDocuments(splitDocs, new TensorFlowEmbeddings());
+```
+
+To connect the datastore to a question asked to an LLM, we need to use the concept at the heart of **LangChain**: the chain. Chains are a way to connect a number of activities together to accomplish a particular task. There are a number of chain types available, but for this tutorial we are using the **RetrievalQAChain**.
+
+```javascript
+import { RetrievalQAChain } from "langchain/chains";
+
+const retriever = vectorStore.asRetriever();
+const chain = RetrievalQAChain.fromLLM(ollama, retriever);
+const result = await chain.call({query: "When was Hawaii's request for a major disaster declaration approved?"});
+console.log(result.text)
+```
+
+So we created a retriever, which is a way to return the chunks that match a query from a datastore. And then connect the retriever and the model via a chain. Finally, we send a query to the chain, which results in an answer using our document as a source. The answer it returned was correct, August 10, 2023.
+
+And that is a simple introduction to what you can do with **LangChain** and **Ollama.**
diff --git a/ollama/docs/tutorials/langchainpy.md b/ollama/docs/tutorials/langchainpy.md
new file mode 100755
index 0000000000000000000000000000000000000000..06543a070189cc36cb2f737fc8d6667a95e88292
--- /dev/null
+++ b/ollama/docs/tutorials/langchainpy.md
@@ -0,0 +1,85 @@
+# Using LangChain with Ollama in Python
+
+Let's imagine we are studying the classics, such as **the Odyssey** by **Homer**. We might have a question about Neleus and his family. If you ask llama2 for that info, you may get something like:
+
+> I apologize, but I'm a large language model, I cannot provide information on individuals or families that do not exist in reality. Neleus is not a real person or character, and therefore does not have a family or any other personal details. My apologies for any confusion. Is there anything else I can help you with?
+
+This sounds like a typical censored response, but even llama2-uncensored gives a mediocre answer:
+
+> Neleus was a legendary king of Pylos and the father of Nestor, one of the Argonauts. His mother was Clymene, a sea nymph, while his father was Neptune, the god of the sea.
+
+So let's figure out how we can use **LangChain** with Ollama to ask our question to the actual document, the Odyssey by Homer, using Python.
+
+Let's start by asking a simple question that we can get an answer to from the **Llama 3** model using **Ollama**. First, we need to install the **LangChain** package:
+
+`pip install langchain_community`
+
+Then we can create a model and ask the question:
+
+```python
+from langchain_community.llms import Ollama
+ollama = Ollama(
+ base_url='http://localhost:11434',
+ model="llama3"
+)
+print(ollama.invoke("why is the sky blue"))
+```
+
+Notice that we are defining the model and the base URL for Ollama.
+
+Now let's load a document to ask questions against. I'll load up the Odyssey by Homer, which you can find at Project Gutenberg. We will need **WebBaseLoader**, which is part of **LangChain** and loads text from any webpage. On my machine, I also needed to install **bs4** to get that to work, so run `pip install bs4`.
+
+```python
+from langchain.document_loaders import WebBaseLoader
+loader = WebBaseLoader("https://www.gutenberg.org/files/1727/1727-h/1727-h.htm")
+data = loader.load()
+```
+
+This file is pretty big. Just the preface is 3000 tokens, which means the full document won't fit into the context for the model. So we need to split it up into smaller pieces.
+
+```python
+from langchain.text_splitter import RecursiveCharacterTextSplitter
+
+text_splitter=RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=0)
+all_splits = text_splitter.split_documents(data)
+```
+
+It's split up, but we still have to find the relevant splits and then submit only those to the model. We can do this by creating embeddings and storing them in a vector database. We can use Ollama directly to instantiate an embedding model, and we will use ChromaDB in this example for the vector database: `pip install chromadb`.
+We also need to pull the embedding model: `ollama pull nomic-embed-text`
+
+```python
+from langchain.embeddings import OllamaEmbeddings
+from langchain.vectorstores import Chroma
+oembed = OllamaEmbeddings(base_url="http://localhost:11434", model="nomic-embed-text")
+vectorstore = Chroma.from_documents(documents=all_splits, embedding=oembed)
+```
+
+Now let's ask a question about the document. **Who was Neleus, and who is in his family?** Neleus is a character in the Odyssey, and the answer can be found in our text.
+
+```python
+question="Who is Neleus and who is in Neleus' family?"
+docs = vectorstore.similarity_search(question)
+len(docs)
+```
+
+This will output the number of chunks the similarity search found to be relevant to the question.
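+
+If you want to peek at what the similarity search returned, you can print part of the first chunk using the document's `page_content` field. This is just an optional inspection step and is not required for the chain defined below:
+
+```python
+# Print the first 300 characters of the best-matching chunk (optional)
+print(docs[0].page_content[:300])
+```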
+
+The next step is to send the question and the relevant parts of the docs to the model to see if we can get a good answer. We are stitching two parts of the process together, and that is called a chain. This means we need to define a chain:
+
+```python
+from langchain.chains import RetrievalQA
+qachain=RetrievalQA.from_chain_type(ollama, retriever=vectorstore.as_retriever())
+res = qachain.invoke({"query": question})
+print(res['result'])
+```
+
+The answer received from this chain was:
+
+> Neleus is a character in Homer's "Odyssey" and is mentioned in the context of Penelope's suitors. Neleus is the father of Chloris, who is married to Neleus and bears him several children, including Nestor, Chromius, Periclymenus, and Pero. Amphinomus, the son of Nisus, is also mentioned as a suitor of Penelope and is known for his good natural disposition and agreeable conversation.
+
+It's not a perfect answer, as it implies Neleus married his daughter when actually Chloris "was the youngest daughter to Amphion son of Iasus and king of Minyan Orchomenus, and was Queen in Pylos".
+
+I updated the `chunk_overlap` for the text splitter to 20 and tried again, and got a much better answer (the adjusted splitter is shown after the answer below):
+
+> Neleus is a character in Homer's epic poem "The Odyssey." He is the husband of Chloris, who is the youngest daughter of Amphion son of Iasus and king of Minyan Orchomenus. Neleus has several children with Chloris, including Nestor, Chromius, Periclymenus, and Pero.
+
+And that is a much better answer.
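+
+For reference, here is the adjusted splitter; the only change from the earlier snippet is `chunk_overlap=20`:
+
+```python
+from langchain.text_splitter import RecursiveCharacterTextSplitter
+
+# Overlap chunks by 20 characters so context isn't lost at chunk boundaries
+text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=20)
+all_splits = text_splitter.split_documents(data)
+```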
diff --git a/ollama/docs/tutorials/nvidia-jetson.md b/ollama/docs/tutorials/nvidia-jetson.md
new file mode 100755
index 0000000000000000000000000000000000000000..bb77c486fb177091587d6519cf5cb72c970e8c12
--- /dev/null
+++ b/ollama/docs/tutorials/nvidia-jetson.md
@@ -0,0 +1,15 @@
+# Running Ollama on NVIDIA Jetson Devices
+
+Ollama runs well on [NVIDIA Jetson Devices](https://www.nvidia.com/en-us/autonomous-machines/embedded-systems/) and should run out of the box with the standard installation instructions.
+
+The following has been tested on [JetPack 5.1.2](https://developer.nvidia.com/embedded/jetpack), but should also work on JetPack 6.0.
+
+- Install Ollama via the standard Linux command (ignore the 404 error): `curl https://ollama.com/install.sh | sh`
+- Pull the model you want to use (e.g. mistral): `ollama pull mistral`
+- Start an interactive session: `ollama run mistral`
+
+And that's it!
+
+# Running Ollama in Docker
+
+When running GPU-accelerated applications in Docker, it is highly recommended to use the [dusty-nv jetson-containers repo](https://github.com/dusty-nv/jetson-containers).
\ No newline at end of file
diff --git a/ollama/docs/windows.md b/ollama/docs/windows.md
new file mode 100755
index 0000000000000000000000000000000000000000..dbfc14407acb88ea26ba332418dd265edb15a63f
--- /dev/null
+++ b/ollama/docs/windows.md
@@ -0,0 +1,63 @@
+# Ollama Windows Preview
+
+Welcome to the Ollama Windows preview.
+
+No more WSL required!
+
+Ollama now runs as a native Windows application, including NVIDIA and AMD Radeon GPU support.
+After installing Ollama Windows Preview, Ollama will run in the background and
+the `ollama` command line is available in `cmd`, `powershell` or your favorite
+terminal application. As usual, the Ollama [api](./api.md) will be served on
+`http://localhost:11434`.
+
+As this is a preview release, you should expect a few bugs here and there. If
+you run into a problem, you can reach out on
+[Discord](https://discord.gg/ollama), or file an
+[issue](https://github.com/ollama/ollama/issues).
+Logs will often be helpful in diagnosing the problem (see
+[Troubleshooting](#troubleshooting) below).
+
+## System Requirements
+
+* Windows 10 22H2 or newer, Home or Pro
+* NVIDIA 452.39 or newer Drivers if you have an NVIDIA card
+* AMD Radeon Driver https://www.amd.com/en/support if you have a Radeon card
+
+Ollama uses unicode characters for progress indication, which may render as unknown squares in some older terminal fonts in Windows 10. If you see this, try changing your terminal font settings.
+
+## API Access
+
+Here's a quick example showing API access from `powershell`
+```powershell
+(Invoke-WebRequest -method POST -Body '{"model":"llama3", "prompt":"Why is the sky blue?", "stream": false}' -uri http://localhost:11434/api/generate ).Content | ConvertFrom-json
+```
+
+## Troubleshooting
+
+While we're in preview, `OLLAMA_DEBUG` is always enabled, which adds
+a "view logs" menu item to the app, and increases logging for the GUI app and
+server.
+
+Ollama on Windows stores files in a few different locations. You can view them in
+the explorer window by hitting `Win+R` and typing in:
+- `explorer %LOCALAPPDATA%\Ollama` contains logs, and downloaded updates
+ - *app.log* contains the most recent logs from the GUI application
+ - *server.log* contains the most recent server logs
+ - *upgrade.log* contains log output for upgrades
+- `explorer %LOCALAPPDATA%\Programs\Ollama` contains the binaries (The installer adds this to your user PATH)
+- `explorer %HOMEPATH%\.ollama` contains models and configuration
+- `explorer %TEMP%` contains temporary executable files in one or more `ollama*` directories
+
+
+## Standalone CLI
+
+The easiest way to install Ollama on Windows is to use the `OllamaSetup.exe`
+installer. It installs in your account without requiring Administrator rights.
+We update Ollama regularly to support the latest models, and this installer will
+help you keep up to date.
+
+If you'd like to install or integrate Ollama as a service, a standalone
+`ollama-windows-amd64.zip` zip file is available containing only the Ollama CLI
+and GPU library dependencies for Nvidia and AMD. This allows for embedding
+Ollama in existing applications, or running it as a system service via `ollama
+serve` with tools such as [NSSM](https://nssm.cc/).
diff --git a/ollama/envconfig/config.go b/ollama/envconfig/config.go
new file mode 100755
index 0000000000000000000000000000000000000000..b82b773d4adb2e27c9dc896b223f932d53ea3d0a
--- /dev/null
+++ b/ollama/envconfig/config.go
@@ -0,0 +1,284 @@
+package envconfig
+
+import (
+ "fmt"
+ "log/slog"
+ "math"
+ "net"
+ "net/url"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// Host returns the scheme and host. Host can be configured via the OLLAMA_HOST environment variable.
+// Default is scheme "http" and host "127.0.0.1:11434"
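+// For example, OLLAMA_HOST=0.0.0.0 resolves to http://0.0.0.0:11434 and OLLAMA_HOST=https://example.com resolves to https://example.com:443.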
+func Host() *url.URL {
+ defaultPort := "11434"
+
+ s := strings.TrimSpace(Var("OLLAMA_HOST"))
+ scheme, hostport, ok := strings.Cut(s, "://")
+ switch {
+ case !ok:
+ scheme, hostport = "http", s
+ case scheme == "http":
+ defaultPort = "80"
+ case scheme == "https":
+ defaultPort = "443"
+ }
+
+ // trim trailing slashes
+ hostport = strings.TrimRight(hostport, "/")
+
+ host, port, err := net.SplitHostPort(hostport)
+ if err != nil {
+ host, port = "127.0.0.1", defaultPort
+ if ip := net.ParseIP(strings.Trim(hostport, "[]")); ip != nil {
+ host = ip.String()
+ } else if hostport != "" {
+ host = hostport
+ }
+ }
+
+ if n, err := strconv.ParseInt(port, 10, 32); err != nil || n > 65535 || n < 0 {
+ slog.Warn("invalid port, using default", "port", port, "default", defaultPort)
+ return &url.URL{
+ Scheme: scheme,
+ Host: net.JoinHostPort(host, defaultPort),
+ }
+ }
+
+ return &url.URL{
+ Scheme: scheme,
+ Host: net.JoinHostPort(host, port),
+ }
+}
+
+// Origins returns a list of allowed origins. Origins can be configured via the OLLAMA_ORIGINS environment variable.
+func Origins() (origins []string) {
+ if s := Var("OLLAMA_ORIGINS"); s != "" {
+ origins = strings.Split(s, ",")
+ }
+
+ for _, origin := range []string{"localhost", "127.0.0.1", "0.0.0.0"} {
+ origins = append(origins,
+ fmt.Sprintf("http://%s", origin),
+ fmt.Sprintf("https://%s", origin),
+ fmt.Sprintf("http://%s", net.JoinHostPort(origin, "*")),
+ fmt.Sprintf("https://%s", net.JoinHostPort(origin, "*")),
+ )
+ }
+
+ origins = append(origins,
+ "app://*",
+ "file://*",
+ "tauri://*",
+ )
+
+ return origins
+}
+
+// Models returns the path to the models directory. Models directory can be configured via the OLLAMA_MODELS environment variable.
+// Default is $HOME/.ollama/models
+func Models() string {
+ if s := Var("OLLAMA_MODELS"); s != "" {
+ return s
+ }
+
+ home, err := os.UserHomeDir()
+ if err != nil {
+ panic(err)
+ }
+
+ return filepath.Join(home, ".ollama", "models")
+}
+
+// KeepAlive returns the duration that models stay loaded in memory. KeepAlive can be configured via the OLLAMA_KEEP_ALIVE environment variable.
+// Negative values are treated as infinite. Zero is treated as no keep alive.
+// Default is 5 minutes.
+func KeepAlive() (keepAlive time.Duration) {
+ keepAlive = 5 * time.Minute
+ if s := Var("OLLAMA_KEEP_ALIVE"); s != "" {
+ if d, err := time.ParseDuration(s); err == nil {
+ keepAlive = d
+ } else if n, err := strconv.ParseInt(s, 10, 64); err == nil {
+ keepAlive = time.Duration(n) * time.Second
+ }
+ }
+
+ if keepAlive < 0 {
+ return time.Duration(math.MaxInt64)
+ }
+
+ return keepAlive
+}
+
+func Bool(k string) func() bool {
+ return func() bool {
+ if s := Var(k); s != "" {
+ b, err := strconv.ParseBool(s)
+ if err != nil {
+ // Any non-empty value that fails to parse is treated as enabled
+ return true
+ }
+
+ return b
+ }
+
+ return false
+ }
+}
+
+var (
+ // Debug enables additional debug information.
+ Debug = Bool("OLLAMA_DEBUG")
+ // FlashAttention enables the experimental flash attention feature.
+ FlashAttention = Bool("OLLAMA_FLASH_ATTENTION")
+ // NoHistory disables readline history.
+ NoHistory = Bool("OLLAMA_NOHISTORY")
+ // NoPrune disables pruning of model blobs on startup.
+ NoPrune = Bool("OLLAMA_NOPRUNE")
+ // SchedSpread allows scheduling models across all GPUs.
+ SchedSpread = Bool("OLLAMA_SCHED_SPREAD")
+ // IntelGPU enables experimental Intel GPU detection.
+ IntelGPU = Bool("OLLAMA_INTEL_GPU")
+)
+
+func String(s string) func() string {
+ return func() string {
+ return Var(s)
+ }
+}
+
+var (
+ LLMLibrary = String("OLLAMA_LLM_LIBRARY")
+ TmpDir = String("OLLAMA_TMPDIR")
+
+ CudaVisibleDevices = String("CUDA_VISIBLE_DEVICES")
+ HipVisibleDevices = String("HIP_VISIBLE_DEVICES")
+ RocrVisibleDevices = String("ROCR_VISIBLE_DEVICES")
+ GpuDeviceOrdinal = String("GPU_DEVICE_ORDINAL")
+ HsaOverrideGfxVersion = String("HSA_OVERRIDE_GFX_VERSION")
+)
+
+func RunnersDir() (p string) {
+ if p := Var("OLLAMA_RUNNERS_DIR"); p != "" {
+ return p
+ }
+
+ if runtime.GOOS != "windows" {
+ return
+ }
+
+ defer func() {
+ if p == "" {
+ slog.Error("unable to locate llm runner directory. Set OLLAMA_RUNNERS_DIR to the location of 'ollama_runners'")
+ }
+ }()
+
+ // On Windows we do not carry the payloads inside the main executable
+ exe, err := os.Executable()
+ if err != nil {
+ return
+ }
+
+ cwd, err := os.Getwd()
+ if err != nil {
+ return
+ }
+
+ var paths []string
+ for _, root := range []string{filepath.Dir(exe), cwd} {
+ paths = append(paths,
+ root,
+ filepath.Join(root, "windows-"+runtime.GOARCH),
+ filepath.Join(root, "dist", "windows-"+runtime.GOARCH),
+ )
+ }
+
+ // Try a few variations to improve developer experience when building from source in the local tree
+ for _, path := range paths {
+ candidate := filepath.Join(path, "ollama_runners")
+ if _, err := os.Stat(candidate); err == nil {
+ p = candidate
+ break
+ }
+ }
+
+ return p
+}
+
+func Uint(key string, defaultValue uint) func() uint {
+ return func() uint {
+ if s := Var(key); s != "" {
+ if n, err := strconv.ParseUint(s, 10, 64); err != nil {
+ slog.Warn("invalid environment variable, using default", "key", key, "value", s, "default", defaultValue)
+ } else {
+ return uint(n)
+ }
+ }
+
+ return defaultValue
+ }
+}
+
+var (
+ // NumParallel sets the number of parallel model requests. NumParallel can be configured via the OLLAMA_NUM_PARALLEL environment variable.
+ NumParallel = Uint("OLLAMA_NUM_PARALLEL", 0)
+ // MaxRunners sets the maximum number of loaded models. MaxRunners can be configured via the OLLAMA_MAX_LOADED_MODELS environment variable.
+ MaxRunners = Uint("OLLAMA_MAX_LOADED_MODELS", 0)
+ // MaxQueue sets the maximum number of queued requests. MaxQueue can be configured via the OLLAMA_MAX_QUEUE environment variable.
+ MaxQueue = Uint("OLLAMA_MAX_QUEUE", 512)
+ // MaxVRAM sets a maximum VRAM override in bytes. MaxVRAM can be configured via the OLLAMA_MAX_VRAM environment variable.
+ MaxVRAM = Uint("OLLAMA_MAX_VRAM", 0)
+)
+
+type EnvVar struct {
+ Name string
+ Value any
+ Description string
+}
+
+func AsMap() map[string]EnvVar {
+ ret := map[string]EnvVar{
+ "OLLAMA_DEBUG": {"OLLAMA_DEBUG", Debug(), "Show additional debug information (e.g. OLLAMA_DEBUG=1)"},
+ "OLLAMA_FLASH_ATTENTION": {"OLLAMA_FLASH_ATTENTION", FlashAttention(), "Enable flash attention"},
+ "OLLAMA_HOST": {"OLLAMA_HOST", Host(), "IP Address for the ollama server (default 127.0.0.1:11434)"},
+ "OLLAMA_KEEP_ALIVE": {"OLLAMA_KEEP_ALIVE", KeepAlive(), "The duration that models stay loaded in memory (default \"5m\")"},
+ "OLLAMA_LLM_LIBRARY": {"OLLAMA_LLM_LIBRARY", LLMLibrary(), "Set LLM library to bypass autodetection"},
+ "OLLAMA_MAX_LOADED_MODELS": {"OLLAMA_MAX_LOADED_MODELS", MaxRunners(), "Maximum number of loaded models per GPU"},
+ "OLLAMA_MAX_QUEUE": {"OLLAMA_MAX_QUEUE", MaxQueue(), "Maximum number of queued requests"},
+ "OLLAMA_MODELS": {"OLLAMA_MODELS", Models(), "The path to the models directory"},
+ "OLLAMA_NOHISTORY": {"OLLAMA_NOHISTORY", NoHistory(), "Do not preserve readline history"},
+ "OLLAMA_NOPRUNE": {"OLLAMA_NOPRUNE", NoPrune(), "Do not prune model blobs on startup"},
+ "OLLAMA_NUM_PARALLEL": {"OLLAMA_NUM_PARALLEL", NumParallel(), "Maximum number of parallel requests"},
+ "OLLAMA_ORIGINS": {"OLLAMA_ORIGINS", Origins(), "A comma separated list of allowed origins"},
+ "OLLAMA_RUNNERS_DIR": {"OLLAMA_RUNNERS_DIR", RunnersDir(), "Location for runners"},
+ "OLLAMA_SCHED_SPREAD": {"OLLAMA_SCHED_SPREAD", SchedSpread(), "Always schedule model across all GPUs"},
+ "OLLAMA_TMPDIR": {"OLLAMA_TMPDIR", TmpDir(), "Location for temporary files"},
+ }
+ if runtime.GOOS != "darwin" {
+ ret["CUDA_VISIBLE_DEVICES"] = EnvVar{"CUDA_VISIBLE_DEVICES", CudaVisibleDevices(), "Set which NVIDIA devices are visible"}
+ ret["HIP_VISIBLE_DEVICES"] = EnvVar{"HIP_VISIBLE_DEVICES", HipVisibleDevices(), "Set which AMD devices are visible"}
+ ret["ROCR_VISIBLE_DEVICES"] = EnvVar{"ROCR_VISIBLE_DEVICES", RocrVisibleDevices(), "Set which AMD devices are visible"}
+ ret["GPU_DEVICE_ORDINAL"] = EnvVar{"GPU_DEVICE_ORDINAL", GpuDeviceOrdinal(), "Set which AMD devices are visible"}
+ ret["HSA_OVERRIDE_GFX_VERSION"] = EnvVar{"HSA_OVERRIDE_GFX_VERSION", HsaOverrideGfxVersion(), "Override the gfx used for all detected AMD GPUs"}
+ ret["OLLAMA_INTEL_GPU"] = EnvVar{"OLLAMA_INTEL_GPU", IntelGPU(), "Enable experimental Intel GPU detection"}
+ }
+ return ret
+}
+
+func Values() map[string]string {
+ vals := make(map[string]string)
+ for k, v := range AsMap() {
+ vals[k] = fmt.Sprintf("%v", v.Value)
+ }
+ return vals
+}
+
+// Var returns an environment variable stripped of leading and trailing quotes or spaces
+func Var(key string) string {
+ return strings.Trim(strings.TrimSpace(os.Getenv(key)), "\"'")
+}
diff --git a/ollama/envconfig/config_test.go b/ollama/envconfig/config_test.go
new file mode 100755
index 0000000000000000000000000000000000000000..92a500f15b580cad9d5cce02c2d20f15f9b8e6f0
--- /dev/null
+++ b/ollama/envconfig/config_test.go
@@ -0,0 +1,235 @@
+package envconfig
+
+import (
+ "math"
+ "testing"
+ "time"
+
+ "github.com/google/go-cmp/cmp"
+)
+
+func TestHost(t *testing.T) {
+ cases := map[string]struct {
+ value string
+ expect string
+ }{
+ "empty": {"", "127.0.0.1:11434"},
+ "only address": {"1.2.3.4", "1.2.3.4:11434"},
+ "only port": {":1234", ":1234"},
+ "address and port": {"1.2.3.4:1234", "1.2.3.4:1234"},
+ "hostname": {"example.com", "example.com:11434"},
+ "hostname and port": {"example.com:1234", "example.com:1234"},
+ "zero port": {":0", ":0"},
+ "too large port": {":66000", ":11434"},
+ "too small port": {":-1", ":11434"},
+ "ipv6 localhost": {"[::1]", "[::1]:11434"},
+ "ipv6 world open": {"[::]", "[::]:11434"},
+ "ipv6 no brackets": {"::1", "[::1]:11434"},
+ "ipv6 + port": {"[::1]:1337", "[::1]:1337"},
+ "extra space": {" 1.2.3.4 ", "1.2.3.4:11434"},
+ "extra quotes": {"\"1.2.3.4\"", "1.2.3.4:11434"},
+ "extra space+quotes": {" \" 1.2.3.4 \" ", "1.2.3.4:11434"},
+ "extra single quotes": {"'1.2.3.4'", "1.2.3.4:11434"},
+ "http": {"http://1.2.3.4", "1.2.3.4:80"},
+ "http port": {"http://1.2.3.4:4321", "1.2.3.4:4321"},
+ "https": {"https://1.2.3.4", "1.2.3.4:443"},
+ "https port": {"https://1.2.3.4:4321", "1.2.3.4:4321"},
+ }
+
+ for name, tt := range cases {
+ t.Run(name, func(t *testing.T) {
+ t.Setenv("OLLAMA_HOST", tt.value)
+ if host := Host(); host.Host != tt.expect {
+ t.Errorf("%s: expected %s, got %s", name, tt.expect, host.Host)
+ }
+ })
+ }
+}
+
+func TestOrigins(t *testing.T) {
+ cases := []struct {
+ value string
+ expect []string
+ }{
+ {"", []string{
+ "http://localhost",
+ "https://localhost",
+ "http://localhost:*",
+ "https://localhost:*",
+ "http://127.0.0.1",
+ "https://127.0.0.1",
+ "http://127.0.0.1:*",
+ "https://127.0.0.1:*",
+ "http://0.0.0.0",
+ "https://0.0.0.0",
+ "http://0.0.0.0:*",
+ "https://0.0.0.0:*",
+ "app://*",
+ "file://*",
+ "tauri://*",
+ }},
+ {"http://10.0.0.1", []string{
+ "http://10.0.0.1",
+ "http://localhost",
+ "https://localhost",
+ "http://localhost:*",
+ "https://localhost:*",
+ "http://127.0.0.1",
+ "https://127.0.0.1",
+ "http://127.0.0.1:*",
+ "https://127.0.0.1:*",
+ "http://0.0.0.0",
+ "https://0.0.0.0",
+ "http://0.0.0.0:*",
+ "https://0.0.0.0:*",
+ "app://*",
+ "file://*",
+ "tauri://*",
+ }},
+ {"http://172.16.0.1,https://192.168.0.1", []string{
+ "http://172.16.0.1",
+ "https://192.168.0.1",
+ "http://localhost",
+ "https://localhost",
+ "http://localhost:*",
+ "https://localhost:*",
+ "http://127.0.0.1",
+ "https://127.0.0.1",
+ "http://127.0.0.1:*",
+ "https://127.0.0.1:*",
+ "http://0.0.0.0",
+ "https://0.0.0.0",
+ "http://0.0.0.0:*",
+ "https://0.0.0.0:*",
+ "app://*",
+ "file://*",
+ "tauri://*",
+ }},
+ {"http://totally.safe,http://definitely.legit", []string{
+ "http://totally.safe",
+ "http://definitely.legit",
+ "http://localhost",
+ "https://localhost",
+ "http://localhost:*",
+ "https://localhost:*",
+ "http://127.0.0.1",
+ "https://127.0.0.1",
+ "http://127.0.0.1:*",
+ "https://127.0.0.1:*",
+ "http://0.0.0.0",
+ "https://0.0.0.0",
+ "http://0.0.0.0:*",
+ "https://0.0.0.0:*",
+ "app://*",
+ "file://*",
+ "tauri://*",
+ }},
+ }
+ for _, tt := range cases {
+ t.Run(tt.value, func(t *testing.T) {
+ t.Setenv("OLLAMA_ORIGINS", tt.value)
+
+ if diff := cmp.Diff(Origins(), tt.expect); diff != "" {
+ t.Errorf("%s: mismatch (-want +got):\n%s", tt.value, diff)
+ }
+ })
+ }
+}
+
+func TestBool(t *testing.T) {
+ cases := map[string]bool{
+ "": false,
+ "true": true,
+ "false": false,
+ "1": true,
+ "0": false,
+ // invalid values
+ "random": true,
+ "something": true,
+ }
+
+ for k, v := range cases {
+ t.Run(k, func(t *testing.T) {
+ t.Setenv("OLLAMA_BOOL", k)
+ if b := Bool("OLLAMA_BOOL")(); b != v {
+ t.Errorf("%s: expected %t, got %t", k, v, b)
+ }
+ })
+ }
+}
+
+func TestUint(t *testing.T) {
+ cases := map[string]uint{
+ "0": 0,
+ "1": 1,
+ "1337": 1337,
+ // default values
+ "": 11434,
+ "-1": 11434,
+ "0o10": 11434,
+ "0x10": 11434,
+ "string": 11434,
+ }
+
+ for k, v := range cases {
+ t.Run(k, func(t *testing.T) {
+ t.Setenv("OLLAMA_UINT", k)
+ if i := Uint("OLLAMA_UINT", 11434)(); i != v {
+ t.Errorf("%s: expected %d, got %d", k, v, i)
+ }
+ })
+ }
+}
+
+func TestKeepAlive(t *testing.T) {
+ cases := map[string]time.Duration{
+ "": 5 * time.Minute,
+ "1s": time.Second,
+ "1m": time.Minute,
+ "1h": time.Hour,
+ "5m0s": 5 * time.Minute,
+ "1h2m3s": 1*time.Hour + 2*time.Minute + 3*time.Second,
+ "0": time.Duration(0),
+ "60": 60 * time.Second,
+ "120": 2 * time.Minute,
+ "3600": time.Hour,
+ "-0": time.Duration(0),
+ "-1": time.Duration(math.MaxInt64),
+ "-1m": time.Duration(math.MaxInt64),
+ // invalid values
+ " ": 5 * time.Minute,
+ "???": 5 * time.Minute,
+ "1d": 5 * time.Minute,
+ "1y": 5 * time.Minute,
+ "1w": 5 * time.Minute,
+ }
+
+ for tt, expect := range cases {
+ t.Run(tt, func(t *testing.T) {
+ t.Setenv("OLLAMA_KEEP_ALIVE", tt)
+ if actual := KeepAlive(); actual != expect {
+ t.Errorf("%s: expected %s, got %s", tt, expect, actual)
+ }
+ })
+ }
+}
+
+func TestVar(t *testing.T) {
+ cases := map[string]string{
+ "value": "value",
+ " value ": "value",
+ " 'value' ": "value",
+ ` "value" `: "value",
+ " ' value ' ": " value ",
+ ` " value " `: " value ",
+ }
+
+ for k, v := range cases {
+ t.Run(k, func(t *testing.T) {
+ t.Setenv("OLLAMA_VAR", k)
+ if s := Var("OLLAMA_VAR"); s != v {
+ t.Errorf("%s: expected %q, got %q", k, v, s)
+ }
+ })
+ }
+}
diff --git a/ollama/examples/.gitignore b/ollama/examples/.gitignore
new file mode 100755
index 0000000000000000000000000000000000000000..b60652b65cff6be44ba3932bb5a57c5f1e89bd47
--- /dev/null
+++ b/ollama/examples/.gitignore
@@ -0,0 +1,174 @@
+node_modules
+bun.lockb
+.vscode
+# OSX
+.DS_STORE
+
+
+# Models
+models/
+
+# Local Chroma db
+.chroma/
+db/
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
diff --git a/ollama/examples/README.md b/ollama/examples/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..b10a34914eb0356fbf5627b5a4c46443f584fd01
--- /dev/null
+++ b/ollama/examples/README.md
@@ -0,0 +1,3 @@
+# Examples
+
+This directory contains different examples of using Ollama.
diff --git a/ollama/examples/flyio/.gitignore b/ollama/examples/flyio/.gitignore
new file mode 100755
index 0000000000000000000000000000000000000000..0501d092d265229a397885ebe7d7eddf91119e43
--- /dev/null
+++ b/ollama/examples/flyio/.gitignore
@@ -0,0 +1 @@
+fly.toml
diff --git a/ollama/examples/flyio/README.md b/ollama/examples/flyio/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..09b90aaded1bf59ffb9a2e8f577bb689106bf47b
--- /dev/null
+++ b/ollama/examples/flyio/README.md
@@ -0,0 +1,67 @@
+# Deploy Ollama to Fly.io
+
+> Note: this example exposes a public endpoint and does not configure authentication. Use with care.
+
+## Prerequisites
+
+- Ollama: https://ollama.com/download
+- Fly.io account. Sign up for a free account: https://fly.io/app/sign-up
+
+## Steps
+
+1. Login to Fly.io
+
+ ```bash
+ fly auth login
+ ```
+
+1. Create a new Fly app
+
+ ```bash
+ fly launch --name <your-app-name> --image ollama/ollama --internal-port 11434 --vm-size shared-cpu-8x --now
+ ```
+
+1. Pull and run `orca-mini:3b`
+
+ ```bash
+ OLLAMA_HOST=https://<your-app-name>.fly.dev ollama run orca-mini:3b
+ ```
+
+`shared-cpu-8x` is a free-tier eligible machine type. For better performance, switch to a `performance` or `dedicated` machine type or attach a GPU for hardware acceleration (see below).
+
+## (Optional) Persistent Volume
+
+By default Fly Machines use ephemeral storage which is problematic if you want to use the same model across restarts without pulling it again. Create and attach a persistent volume to store the downloaded models:
+
+1. Create the Fly Volume
+
+ ```bash
+ fly volume create ollama
+ ```
+
+1. Update `fly.toml` and add `[mounts]`
+
+ ```toml
+ [mounts]
+ source = "ollama"
+ destination = "/mnt/ollama/models"
+ ```
+
+1. Update `fly.toml` and add `[env]`
+
+ ```toml
+ [env]
+ OLLAMA_MODELS = "/mnt/ollama/models"
+ ```
+
+1. Deploy your app
+
+ ```bash
+ fly deploy
+ ```
+
+## (Optional) Hardware Acceleration
+
+Fly.io GPU is currently in waitlist. Sign up for the waitlist: https://fly.io/gpu
+
+Once you've been accepted, create the app with the additional flags `--vm-gpu-kind a100-pcie-40gb` or `--vm-gpu-kind a100-pcie-80gb`.
diff --git a/ollama/examples/go-chat/main.go b/ollama/examples/go-chat/main.go
new file mode 100755
index 0000000000000000000000000000000000000000..7663fb8f4ee0e1a12906a07329c920001f6c615b
--- /dev/null
+++ b/ollama/examples/go-chat/main.go
@@ -0,0 +1,51 @@
+package main
+
+import (
+ "context"
+ "fmt"
+ "log"
+
+ "github.com/ollama/ollama/api"
+)
+
+func main() {
+ client, err := api.ClientFromEnvironment()
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ messages := []api.Message{
+ api.Message{
+ Role: "system",
+ Content: "Provide very brief, concise responses",
+ },
+ api.Message{
+ Role: "user",
+ Content: "Name some unusual animals",
+ },
+ api.Message{
+ Role: "assistant",
+ Content: "Monotreme, platypus, echidna",
+ },
+ api.Message{
+ Role: "user",
+ Content: "which of these is the most dangerous?",
+ },
+ }
+
+ ctx := context.Background()
+ req := &api.ChatRequest{
+ Model: "llama3.1",
+ Messages: messages,
+ }
+
+ respFunc := func(resp api.ChatResponse) error {
+ fmt.Print(resp.Message.Content)
+ return nil
+ }
+
+ err = client.Chat(ctx, req, respFunc)
+ if err != nil {
+ log.Fatal(err)
+ }
+}
diff --git a/ollama/examples/go-generate-streaming/main.go b/ollama/examples/go-generate-streaming/main.go
new file mode 100755
index 0000000000000000000000000000000000000000..3acfb22a9fc45cab4d57e01649f74c8f130ef57c
--- /dev/null
+++ b/ollama/examples/go-generate-streaming/main.go
@@ -0,0 +1,40 @@
+package main
+
+import (
+ "context"
+ "fmt"
+ "log"
+
+ "github.com/ollama/ollama/api"
+)
+
+func main() {
+ client, err := api.ClientFromEnvironment()
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // By default, GenerateRequest is streaming.
+ req := &api.GenerateRequest{
+ Model: "gemma2",
+ Prompt: "how many planets are there?",
+ }
+
+ ctx := context.Background()
+ respFunc := func(resp api.GenerateResponse) error {
+ // Only print the response here; GenerateResponse has a number of other
+ // interesting fields you may want to examine.
+
+ // In streaming mode, responses are partial so we call fmt.Print (and not
+ // Println) in order to avoid spurious newlines being introduced. The
+ // model will insert its own newlines if it wants.
+ fmt.Print(resp.Response)
+ return nil
+ }
+
+ err = client.Generate(ctx, req, respFunc)
+ if err != nil {
+ log.Fatal(err)
+ }
+ fmt.Println()
+}
diff --git a/ollama/examples/go-generate/main.go b/ollama/examples/go-generate/main.go
new file mode 100755
index 0000000000000000000000000000000000000000..2fe28742b5a63d5f8080a27508592ddd08272afa
--- /dev/null
+++ b/ollama/examples/go-generate/main.go
@@ -0,0 +1,37 @@
+package main
+
+import (
+ "context"
+ "fmt"
+ "log"
+
+ "github.com/ollama/ollama/api"
+)
+
+func main() {
+ client, err := api.ClientFromEnvironment()
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ req := &api.GenerateRequest{
+ Model: "gemma2",
+ Prompt: "how many planets are there?",
+
+ // set streaming to false
+ Stream: new(bool),
+ }
+
+ ctx := context.Background()
+ respFunc := func(resp api.GenerateResponse) error {
+ // Only print the response here; GenerateResponse has a number of other
+ // interesting fields you may want to examine.
+ fmt.Println(resp.Response)
+ return nil
+ }
+
+ err = client.Generate(ctx, req, respFunc)
+ if err != nil {
+ log.Fatal(err)
+ }
+}
diff --git a/ollama/examples/go-http-generate/main.go b/ollama/examples/go-http-generate/main.go
new file mode 100755
index 0000000000000000000000000000000000000000..e5b643486152b24c60f993eb8eec21f03281be5f
--- /dev/null
+++ b/ollama/examples/go-http-generate/main.go
@@ -0,0 +1,29 @@
+package main
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "log"
+ "net/http"
+ "os"
+)
+
+func main() {
+ // With no prompt in the request body, /api/generate loads the model and returns an empty response.
+ body := []byte(`{"model":"mistral"}`)
+ resp, err := http.Post("http://localhost:11434/api/generate", "application/json", bytes.NewBuffer(body))
+
+ if err != nil {
+ fmt.Print(err.Error())
+ os.Exit(1)
+ }
+
+ defer resp.Body.Close()
+
+ responseData, err := io.ReadAll(resp.Body)
+ if err != nil {
+ log.Fatal(err)
+ }
+ fmt.Println(string(responseData))
+
+}
diff --git a/ollama/examples/go-multimodal/main.go b/ollama/examples/go-multimodal/main.go
new file mode 100755
index 0000000000000000000000000000000000000000..0b0f19ed93501bc917ee36b2f8a1c70728d5eab0
--- /dev/null
+++ b/ollama/examples/go-multimodal/main.go
@@ -0,0 +1,47 @@
+package main
+
+import (
+ "context"
+ "fmt"
+ "log"
+ "os"
+
+ "github.com/ollama/ollama/api"
+)
+
+func main() {
+ if len(os.Args) <= 1 {
+ log.Fatal("usage: <image name>")
+ }
+
+ imgData, err := os.ReadFile(os.Args[1])
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ client, err := api.ClientFromEnvironment()
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ req := &api.GenerateRequest{
+ Model: "llava",
+ Prompt: "describe this image",
+ Images: []api.ImageData{imgData},
+ }
+
+ ctx := context.Background()
+ respFunc := func(resp api.GenerateResponse) error {
+ // In streaming mode, responses are partial so we call fmt.Print (and not
+ // Println) in order to avoid spurious newlines being introduced. The
+ // model will insert its own newlines if it wants.
+ fmt.Print(resp.Response)
+ return nil
+ }
+
+ err = client.Generate(ctx, req, respFunc)
+ if err != nil {
+ log.Fatal(err)
+ }
+ fmt.Println()
+}
diff --git a/ollama/examples/go-pull-progress/main.go b/ollama/examples/go-pull-progress/main.go
new file mode 100755
index 0000000000000000000000000000000000000000..7486336856052474aa43d6f7a0991d560254aa82
--- /dev/null
+++ b/ollama/examples/go-pull-progress/main.go
@@ -0,0 +1,31 @@
+package main
+
+import (
+ "context"
+ "fmt"
+ "log"
+
+ "github.com/ollama/ollama/api"
+)
+
+func main() {
+ client, err := api.ClientFromEnvironment()
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ ctx := context.Background()
+
+ req := &api.PullRequest{
+ Model: "mistral",
+ }
+ progressFunc := func(resp api.ProgressResponse) error {
+ fmt.Printf("Progress: status=%v, total=%v, completed=%v\n", resp.Status, resp.Total, resp.Completed)
+ return nil
+ }
+
+ err = client.Pull(ctx, req, progressFunc)
+ if err != nil {
+ log.Fatal(err)
+ }
+}
diff --git a/ollama/examples/jupyter-notebook/README.md b/ollama/examples/jupyter-notebook/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..fba6802f4c33ace80e6de42cc5646bc51d00f888
--- /dev/null
+++ b/ollama/examples/jupyter-notebook/README.md
@@ -0,0 +1,5 @@
+# Ollama Jupyter Notebook
+
+This example downloads and installs Ollama in a Jupyter instance such as Google Colab. It will start the Ollama service and expose an endpoint using `ngrok` which can be used to communicate with the Ollama instance remotely.
+
+For best results, use an instance with a GPU accelerator.
diff --git a/ollama/examples/jupyter-notebook/ollama.ipynb b/ollama/examples/jupyter-notebook/ollama.ipynb
new file mode 100755
index 0000000000000000000000000000000000000000..bee353cbd295d43854ca388ef23f6df7a8f033c8
--- /dev/null
+++ b/ollama/examples/jupyter-notebook/ollama.ipynb
@@ -0,0 +1,102 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "93f59dcb-c588-41b8-a792-55d88ade739c",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Download and run the Ollama Linux install script\n",
+ "!curl -fsSL https://ollama.com/install.sh | sh\n",
+ "!command -v systemctl >/dev/null && sudo systemctl stop ollama"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "658c147e-c7f8-490e-910e-62b80f577dda",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "!pip install aiohttp pyngrok\n",
+ "\n",
+ "import os\n",
+ "import asyncio\n",
+ "from aiohttp import ClientSession\n",
+ "\n",
+ "# Set LD_LIBRARY_PATH so the system NVIDIA library becomes preferred\n",
+ "# over the built-in library. This is particularly important for \n",
+ "# Google Colab which installs older drivers\n",
+ "os.environ.update({'LD_LIBRARY_PATH': '/usr/lib64-nvidia'})\n",
+ "\n",
+ "async def run(cmd):\n",
+ " '''\n",
+ " run is a helper function to run subcommands asynchronously.\n",
+ " '''\n",
+ " print('>>> starting', *cmd)\n",
+ " p = await asyncio.subprocess.create_subprocess_exec(\n",
+ " *cmd,\n",
+ " stdout=asyncio.subprocess.PIPE,\n",
+ " stderr=asyncio.subprocess.PIPE,\n",
+ " )\n",
+ "\n",
+ " async def pipe(lines):\n",
+ " async for line in lines:\n",
+ " print(line.strip().decode('utf-8'))\n",
+ "\n",
+ " await asyncio.gather(\n",
+ " pipe(p.stdout),\n",
+ " pipe(p.stderr),\n",
+ " )\n",
+ "\n",
+ "\n",
+ "await asyncio.gather(\n",
+ " run(['ollama', 'serve']),\n",
+ " run(['ngrok', 'http', '--log', 'stderr', '11434']),\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "e7735a55-9aad-4caf-8683-52e2163ba53b",
+ "metadata": {},
+ "source": [
+ "The previous cell starts two processes, `ollama` and `ngrok`. The log output will show a line like the following which describes the external address.\n",
+ "\n",
+ "```\n",
+ "t=2023-11-12T22:55:56+0000 lvl=info msg=\"started tunnel\" obj=tunnels name=command_line addr=http://localhost:11434 url=https://8249-34-125-179-11.ngrok.io\n",
+ "```\n",
+ "\n",
+ "The external address in this case is `https://8249-34-125-179-11.ngrok.io` which can be passed into `OLLAMA_HOST` to access this instance.\n",
+ "\n",
+ "```bash\n",
+ "export OLLAMA_HOST=https://8249-34-125-179-11.ngrok.io\n",
+ "ollama list\n",
+ "ollama run mistral\n",
+ "```"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.11.6"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/ollama/examples/kubernetes/README.md b/ollama/examples/kubernetes/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..2e2444c7cb73f41d39879e059ff8a31f2ae3ba3e
--- /dev/null
+++ b/ollama/examples/kubernetes/README.md
@@ -0,0 +1,38 @@
+# Deploy Ollama to Kubernetes
+
+## Prerequisites
+
+- Ollama: https://ollama.com/download
+- Kubernetes cluster. This example will use Google Kubernetes Engine.
+
+## Steps
+
+1. Create the Ollama namespace, deployment, and service
+
+ ```bash
+ kubectl apply -f cpu.yaml
+ ```
+
+## (Optional) Hardware Acceleration
+
+Hardware acceleration in Kubernetes requires NVIDIA's [`k8s-device-plugin`](https://github.com/NVIDIA/k8s-device-plugin), which is deployed in Kubernetes as a DaemonSet. Follow the link for more details.
+
+Once configured, create a GPU enabled Ollama deployment.
+
+```bash
+kubectl apply -f gpu.yaml
+```
+
+## Test
+
+1. Port forward the Ollama service to connect and use it locally
+
+ ```bash
+ kubectl -n ollama port-forward service/ollama 11434:80
+ ```
+
+1. Pull and run a model, for example `orca-mini:3b`
+
+ ```bash
+ ollama run orca-mini:3b
+ ```
\ No newline at end of file
diff --git a/ollama/examples/kubernetes/cpu.yaml b/ollama/examples/kubernetes/cpu.yaml
new file mode 100755
index 0000000000000000000000000000000000000000..b8ddcdde6171862238c84607b9c5a69213c9800e
--- /dev/null
+++ b/ollama/examples/kubernetes/cpu.yaml
@@ -0,0 +1,42 @@
+---
+apiVersion: v1
+kind: Namespace
+metadata:
+ name: ollama
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: ollama
+ namespace: ollama
+spec:
+ selector:
+ matchLabels:
+ name: ollama
+ template:
+ metadata:
+ labels:
+ name: ollama
+ spec:
+ containers:
+ - name: ollama
+ image: ollama/ollama:latest
+ ports:
+ - name: http
+ containerPort: 11434
+ protocol: TCP
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: ollama
+ namespace: ollama
+spec:
+ type: ClusterIP
+ selector:
+ name: ollama
+ ports:
+ - port: 80
+ name: http
+ targetPort: http
+ protocol: TCP
diff --git a/ollama/examples/kubernetes/gpu.yaml b/ollama/examples/kubernetes/gpu.yaml
new file mode 100755
index 0000000000000000000000000000000000000000..ba90abb614081ff8accce9f5cf30da9a43da5393
--- /dev/null
+++ b/ollama/examples/kubernetes/gpu.yaml
@@ -0,0 +1,58 @@
+---
+apiVersion: v1
+kind: Namespace
+metadata:
+ name: ollama
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: ollama
+ namespace: ollama
+spec:
+ strategy:
+ type: Recreate
+ selector:
+ matchLabels:
+ name: ollama
+ template:
+ metadata:
+ labels:
+ name: ollama
+ spec:
+ containers:
+ - name: ollama
+ image: ollama/ollama:latest
+ env:
+ - name: PATH
+ value: /usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+ - name: LD_LIBRARY_PATH
+ value: /usr/local/nvidia/lib:/usr/local/nvidia/lib64
+ - name: NVIDIA_DRIVER_CAPABILITIES
+ value: compute,utility
+ ports:
+ - name: http
+ containerPort: 11434
+ protocol: TCP
+ resources:
+ limits:
+ nvidia.com/gpu: 1
+ tolerations:
+ - key: nvidia.com/gpu
+ operator: Exists
+ effect: NoSchedule
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: ollama
+ namespace: ollama
+spec:
+ type: ClusterIP
+ selector:
+ name: ollama
+ ports:
+ - port: 80
+ name: http
+ targetPort: http
+ protocol: TCP
diff --git a/ollama/examples/langchain-python-rag-document/README.md b/ollama/examples/langchain-python-rag-document/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..e2f3bc02824500aae0d66cc8934a7d5739f2c3c3
--- /dev/null
+++ b/ollama/examples/langchain-python-rag-document/README.md
@@ -0,0 +1,29 @@
+# LangChain Document QA
+
+This example provides an interface for asking questions to a PDF document.
+
+## Setup
+
+1. Ensure you have the `llama3.1` model installed:
+
+```
+ollama pull llama3.1
+```
+
+2. Install the Python Requirements.
+
+```
+pip install -r requirements.txt
+```
+
+## Run
+
+```
+python main.py
+```
+
+A prompt will appear, where questions may be asked:
+
+```
+Query: How many locations does WeWork have?
+```
diff --git a/ollama/examples/langchain-python-rag-document/main.py b/ollama/examples/langchain-python-rag-document/main.py
new file mode 100755
index 0000000000000000000000000000000000000000..6f7cec9be3faab6d01cb60bbbeb904c924f60854
--- /dev/null
+++ b/ollama/examples/langchain-python-rag-document/main.py
@@ -0,0 +1,61 @@
+from langchain.document_loaders import OnlinePDFLoader
+from langchain.vectorstores import Chroma
+from langchain.embeddings import GPT4AllEmbeddings
+from langchain import PromptTemplate
+from langchain.llms import Ollama
+from langchain.callbacks.manager import CallbackManager
+from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
+from langchain.chains import RetrievalQA
+import sys
+import os
+
+class SuppressStdout:
+ def __enter__(self):
+ self._original_stdout = sys.stdout
+ self._original_stderr = sys.stderr
+ sys.stdout = open(os.devnull, 'w')
+ sys.stderr = open(os.devnull, 'w')
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ sys.stdout.close()
+ sys.stderr.close()
+ sys.stdout = self._original_stdout
+ sys.stderr = self._original_stderr
+
+# load the pdf and split it into chunks
+loader = OnlinePDFLoader("https://d18rn0p25nwr6d.cloudfront.net/CIK-0001813756/975b3e9b-268e-4798-a9e4-2a9a7c92dc10.pdf")
+data = loader.load()
+
+from langchain.text_splitter import RecursiveCharacterTextSplitter
+text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=0)
+all_splits = text_splitter.split_documents(data)
+
+with SuppressStdout():
+ vectorstore = Chroma.from_documents(documents=all_splits, embedding=GPT4AllEmbeddings())
+
+while True:
+ query = input("\nQuery: ")
+ if query == "exit":
+ break
+ if query.strip() == "":
+ continue
+
+ # Prompt
+ template = """Use the following pieces of context to answer the question at the end.
+ If you don't know the answer, just say that you don't know, don't try to make up an answer.
+ Use three sentences maximum and keep the answer as concise as possible.
+ {context}
+ Question: {question}
+ Helpful Answer:"""
+ QA_CHAIN_PROMPT = PromptTemplate(
+ input_variables=["context", "question"],
+ template=template,
+ )
+
+ llm = Ollama(model="llama3.1", callback_manager=CallbackManager([StreamingStdOutCallbackHandler()]))
+ qa_chain = RetrievalQA.from_chain_type(
+ llm,
+ retriever=vectorstore.as_retriever(),
+ chain_type_kwargs={"prompt": QA_CHAIN_PROMPT},
+ )
+
+ result = qa_chain({"query": query})
diff --git a/ollama/examples/langchain-python-rag-document/requirements.txt b/ollama/examples/langchain-python-rag-document/requirements.txt
new file mode 100755
index 0000000000000000000000000000000000000000..09a54191949165a99891cf7865c6db54b3e4dffe
--- /dev/null
+++ b/ollama/examples/langchain-python-rag-document/requirements.txt
@@ -0,0 +1,109 @@
+absl-py==1.4.0
+aiohttp==3.8.5
+aiosignal==1.3.1
+anyio==3.7.1
+astunparse==1.6.3
+async-timeout==4.0.3
+attrs==23.1.0
+backoff==2.2.1
+beautifulsoup4==4.12.2
+bs4==0.0.1
+cachetools==5.3.1
+certifi==2023.7.22
+cffi==1.15.1
+chardet==5.2.0
+charset-normalizer==3.2.0
+Chroma==0.2.0
+chroma-hnswlib==0.7.2
+chromadb==0.4.5
+click==8.1.6
+coloredlogs==15.0.1
+cryptography==41.0.3
+dataclasses-json==0.5.14
+fastapi==0.99.1
+filetype==1.2.0
+flatbuffers==23.5.26
+frozenlist==1.4.0
+gast==0.4.0
+google-auth==2.22.0
+google-auth-oauthlib==1.0.0
+google-pasta==0.2.0
+gpt4all==1.0.8
+grpcio==1.57.0
+h11==0.14.0
+h5py==3.9.0
+httptools==0.6.0
+humanfriendly==10.0
+idna==3.4
+importlib-resources==6.0.1
+joblib==1.3.2
+keras==2.13.1
+langchain==0.0.261
+langsmith==0.0.21
+libclang==16.0.6
+lxml==4.9.3
+Markdown==3.4.4
+MarkupSafe==2.1.3
+marshmallow==3.20.1
+monotonic==1.6
+mpmath==1.3.0
+multidict==6.0.4
+mypy-extensions==1.0.0
+nltk==3.8.1
+numexpr==2.8.5
+numpy==1.24.3
+oauthlib==3.2.2
+onnxruntime==1.15.1
+openapi-schema-pydantic==1.2.4
+opt-einsum==3.3.0
+overrides==7.4.0
+packaging==23.1
+pdf2image==1.16.3
+pdfminer==20191125
+pdfminer.six==20221105
+Pillow==10.0.0
+posthog==3.0.1
+protobuf==4.24.0
+pulsar-client==3.2.0
+pyasn1==0.5.0
+pyasn1-modules==0.3.0
+pycparser==2.21
+pycryptodome==3.18.0
+pydantic==1.10.12
+PyPika==0.48.9
+python-dateutil==2.8.2
+python-dotenv==1.0.0
+python-magic==0.4.27
+PyYAML==6.0.1
+regex==2023.8.8
+requests==2.31.0
+requests-oauthlib==1.3.1
+rsa==4.9
+six==1.16.0
+sniffio==1.3.0
+soupsieve==2.4.1
+SQLAlchemy==2.0.19
+starlette==0.27.0
+sympy==1.12
+tabulate==0.9.0
+tenacity==8.2.2
+tensorboard==2.13.0
+tensorboard-data-server==0.7.1
+tensorflow==2.13.0
+tensorflow-estimator==2.13.0
+tensorflow-hub==0.14.0
+tensorflow-macos==2.13.0
+termcolor==2.3.0
+tokenizers==0.13.3
+tqdm==4.66.1
+typing-inspect==0.9.0
+typing_extensions==4.5.0
+unstructured==0.9.2
+urllib3==1.26.16
+uvicorn==0.23.2
+uvloop==0.17.0
+watchfiles==0.19.0
+websockets==11.0.3
+Werkzeug==2.3.6
+wrapt==1.15.0
+yarl==1.9.2
diff --git a/ollama/examples/langchain-python-rag-privategpt/.gitignore b/ollama/examples/langchain-python-rag-privategpt/.gitignore
new file mode 100755
index 0000000000000000000000000000000000000000..240b29e5a886c7919b61947644574f903e00d9ad
--- /dev/null
+++ b/ollama/examples/langchain-python-rag-privategpt/.gitignore
@@ -0,0 +1,170 @@
+# OSX
+.DS_STORE
+
+# Models
+models/
+
+# Local Chroma db
+.chroma/
+db/
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
diff --git a/ollama/examples/langchain-python-rag-privategpt/LICENSE b/ollama/examples/langchain-python-rag-privategpt/LICENSE
new file mode 100755
index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64
--- /dev/null
+++ b/ollama/examples/langchain-python-rag-privategpt/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/ollama/examples/langchain-python-rag-privategpt/README.md b/ollama/examples/langchain-python-rag-privategpt/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..0133fd88b3174abd0680af0abcf7b54509f28207
--- /dev/null
+++ b/ollama/examples/langchain-python-rag-privategpt/README.md
@@ -0,0 +1,91 @@
+# PrivateGPT with Llama 2 uncensored
+
+https://github.com/ollama/ollama/assets/3325447/20cf8ec6-ff25-42c6-bdd8-9be594e3ce1b
+
+> Note: this example is a slightly modified version of PrivateGPT that uses models such as Llama 2 Uncensored. All credit for PrivateGPT goes to its creator, Iván Martínez; you can find his GitHub repo [here](https://github.com/imartinez/privateGPT).
+
+### Setup
+
+Set up a virtual environment (optional):
+
+```shell
+python3 -m venv .venv
+source .venv/bin/activate
+```
+
+Install the Python dependencies:
+
+```shell
+pip install -r requirements.txt
+```
+
+Pull the model you'd like to use:
+
+```shell
+ollama pull llama2-uncensored
+```
+
+### Getting WeWork's latest quarterly earnings report (10-Q)
+
+```shell
+mkdir source_documents
+curl https://d18rn0p25nwr6d.cloudfront.net/CIK-0001813756/975b3e9b-268e-4798-a9e4-2a9a7c92dc10.pdf -o source_documents/wework.pdf
+```
+
+### Ingesting files
+
+```shell
+python ingest.py
+```
+
+Output should look like this:
+
+```shell
+Creating new vectorstore
+Loading documents from source_documents
+Loading new documents: 100%|██████████████████████| 1/1 [00:01<00:00, 1.73s/it]
+Loaded 1 new documents from source_documents
+Split into 90 chunks of text (max. 500 tokens each)
+Creating embeddings. May take some minutes...
+Using embedded DuckDB with persistence: data will be stored in: db
+Ingestion complete! You can now run privateGPT.py to query your documents
+```
+
+### Ask questions
+
+```shell
+python privateGPT.py
+
+Enter a query: How many locations does WeWork have?
+
+> Answer (took 17.7 s.):
+As of June 2023, WeWork has 777 locations worldwide, including 610 Consolidated Locations (as defined in the section entitled Key Performance Indicators).
+```
+
+### Try a different model
+
+```shell
+ollama pull llama2:13b
+MODEL=llama2:13b python privateGPT.py
+```
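+
+The `MODEL` environment variable tells `privateGPT.py` which Ollama model to query. The sketch below shows that pattern in minimal form, assuming the LangChain `Ollama` LLM wrapper; it is illustrative only and not the script's exact code:
+
+```python
+import os
+
+from langchain.llms import Ollama
+
+# Default to the model pulled in Setup; override with e.g. MODEL=llama2:13b
+model = os.environ.get("MODEL", "llama2-uncensored")
+llm = Ollama(model=model)
+
+print(llm("Summarize the key points of the ingested 10-Q."))
+```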
+
+### Adding more files
+
+Put any and all of your files into the `source_documents` directory.
+
+The supported extensions are:
+
+- `.csv`: CSV
+- `.docx`: Word Document
+- `.doc`: Word Document
+- `.enex`: EverNote
+- `.eml`: Email
+- `.epub`: EPub
+- `.html`: HTML File
+- `.md`: Markdown
+- `.msg`: Outlook Message
+- `.odt`: Open Document Text
+- `.pdf`: Portable Document Format (PDF)
+- `.pptx`: PowerPoint Document
+- `.ppt`: PowerPoint Document
+- `.txt`: Text file (UTF-8)
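+
+Under the hood, `ingest.py` maps each of these extensions to a LangChain document loader through its `LOADER_MAPPING` table. The sketch below mirrors that lookup with only two entries for illustration (the helper name `pick_loader` is ours, not part of the example); see `ingest.py` for the full mapping.
+
+```python
+import os
+
+from langchain.document_loaders import TextLoader, UnstructuredMarkdownLoader
+
+# Illustrative subset of the mapping defined in ingest.py
+LOADER_MAPPING = {
+    ".md": (UnstructuredMarkdownLoader, {}),
+    ".txt": (TextLoader, {"encoding": "utf8"}),
+}
+
+def pick_loader(file_path: str):
+    _, ext = os.path.splitext(file_path)
+    if ext not in LOADER_MAPPING:
+        raise ValueError(f"Unsupported file extension: {ext}")
+    loader_class, loader_args = LOADER_MAPPING[ext]
+    return loader_class(file_path, **loader_args)
+```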
diff --git a/ollama/examples/langchain-python-rag-privategpt/constants.py b/ollama/examples/langchain-python-rag-privategpt/constants.py
new file mode 100755
index 0000000000000000000000000000000000000000..56dda79566dcce3634fea12da9b767d6d1209bd0
--- /dev/null
+++ b/ollama/examples/langchain-python-rag-privategpt/constants.py
@@ -0,0 +1,11 @@
+import os
+from chromadb.config import Settings
+
+# Define the folder for storing database
+PERSIST_DIRECTORY = os.environ.get('PERSIST_DIRECTORY', 'db')
+
+# Define the Chroma settings
+CHROMA_SETTINGS = Settings(
+ persist_directory=PERSIST_DIRECTORY,
+ anonymized_telemetry=False
+)
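+
+# ingest.py passes CHROMA_SETTINGS to langchain.vectorstores.Chroma as
+# `client_settings`, so the index is persisted under the `db` folder by default.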
diff --git a/ollama/examples/langchain-python-rag-privategpt/ingest.py b/ollama/examples/langchain-python-rag-privategpt/ingest.py
new file mode 100755
index 0000000000000000000000000000000000000000..0f71ccf0714a32ccbfbf0771a8cc4f78ba99d1f1
--- /dev/null
+++ b/ollama/examples/langchain-python-rag-privategpt/ingest.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python3
+import os
+import glob
+from typing import List
+from multiprocessing import Pool
+from tqdm import tqdm
+
+from langchain.document_loaders import (
+ CSVLoader,
+ EverNoteLoader,
+ PyMuPDFLoader,
+ TextLoader,
+ UnstructuredEmailLoader,
+ UnstructuredEPubLoader,
+ UnstructuredHTMLLoader,
+ UnstructuredMarkdownLoader,
+ UnstructuredODTLoader,
+ UnstructuredPowerPointLoader,
+ UnstructuredWordDocumentLoader,
+)
+
+from langchain.text_splitter import RecursiveCharacterTextSplitter
+from langchain.vectorstores import Chroma
+from langchain.embeddings import HuggingFaceEmbeddings
+from langchain.docstore.document import Document
+from constants import CHROMA_SETTINGS
+
+
+# Load environment variables
+persist_directory = os.environ.get('PERSIST_DIRECTORY', 'db')
+source_directory = os.environ.get('SOURCE_DIRECTORY', 'source_documents')
+embeddings_model_name = os.environ.get('EMBEDDINGS_MODEL_NAME', 'all-MiniLM-L6-v2')
+chunk_size = 500
+chunk_overlap = 50
+
+# Custom document loaders
+class MyElmLoader(UnstructuredEmailLoader):
+    """Wrapper that falls back to text/plain when the default email loader fails"""
+
+    def load(self) -> List[Document]:
+        """Load the email, retrying with text/plain for .eml files without HTML content"""
+        try:
+            try:
+                doc = UnstructuredEmailLoader.load(self)
+            except ValueError as e:
+                if 'text/html content not found in email' in str(e):
+                    # Retry with the plain-text part of the message
+                    self.unstructured_kwargs["content_source"] = "text/plain"
+                    doc = UnstructuredEmailLoader.load(self)
+                else:
+                    raise
+        except Exception as e:
+            # Add file_path to the exception message
+            raise type(e)(f"{self.file_path}: {e}") from e
+
+        return doc
+
+
+# Map file extensions to document loaders and their arguments
+LOADER_MAPPING = {
+ ".csv": (CSVLoader, {}),
+ # ".docx": (Docx2txtLoader, {}),
+ ".doc": (UnstructuredWordDocumentLoader, {}),
+ ".docx": (UnstructuredWordDocumentLoader, {}),
+ ".enex": (EverNoteLoader, {}),
+ ".eml": (MyElmLoader, {}),
+ ".epub": (UnstructuredEPubLoader, {}),
+ ".html": (UnstructuredHTMLLoader, {}),
+ ".md": (UnstructuredMarkdownLoader, {}),
+ ".odt": (UnstructuredODTLoader, {}),
+ ".pdf": (PyMuPDFLoader, {}),
+ ".ppt": (UnstructuredPowerPointLoader, {}),
+ ".pptx": (UnstructuredPowerPointLoader, {}),
+ ".txt": (TextLoader, {"encoding": "utf8"}),
+ # Add more mappings for other file extensions and loaders as needed
+}
+
+
+def load_single_document(file_path: str) -> List[Document]:
+    if os.path.getsize(file_path) != 0:
+        _, ext = os.path.splitext(file_path)
+        if ext in LOADER_MAPPING:
+            loader_class, loader_args = LOADER_MAPPING[ext]
+            try:
+                loader = loader_class(file_path, **loader_args)
+                return loader.load()
+            except Exception:
+                print(f"Corrupted file {file_path}. Ignoring it.")
+        else:
+            print(f"Unsupported file {file_path}. Ignoring it.")
+    else:
+        print(f"Empty file {file_path}. Ignoring it.")
+
+
+def load_documents(source_dir: str, ignored_files: List[str] = []) -> List[Document]:
+ """
+ Loads all documents from the source documents directory, ignoring specified files
+ """
+ all_files = []
+ for ext in LOADER_MAPPING:
+ all_files.extend(
+ glob.glob(os.path.join(source_dir, f"**/*{ext}"), recursive=True)
+ )
+ filtered_files = [file_path for file_path in all_files if file_path not in ignored_files]
+
+ with Pool(processes=os.cpu_count()) as pool:
+ results = []
+ with tqdm(total=len(filtered_files), desc='Loading new documents', ncols=80) as pbar:
+ for i, docs in enumerate(pool.imap_unordered(load_single_document, filtered_files)):
+ if docs:
+ results.extend(docs)
+ pbar.update()
+
+ return results
+
+def process_documents(ignored_files: List[str] = []) -> List[Document]:
+ """
+ Load documents and split in chunks
+ """
+ print(f"Loading documents from {source_directory}")
+ documents = load_documents(source_directory, ignored_files)
+ if not documents:
+ print("No new documents to load")
+ exit(0)
+ print(f"Loaded {len(documents)} new documents from {source_directory}")
+ text_splitter = RecursiveCharacterTextSplitter(chunk_size=chunk_size, chunk_overlap=chunk_overlap)
+ texts = text_splitter.split_documents(documents)
+ print(f"Split into {len(texts)} chunks of text (max. {chunk_size} tokens each)")
+ return texts
+
+def does_vectorstore_exist(persist_directory: str) -> bool:
+    """
+    Checks if a vectorstore already exists by looking for Chroma's on-disk layout:
+    collection/embedding parquet files plus an index/ directory of .bin/.pkl files
+    """
+    if os.path.exists(os.path.join(persist_directory, 'index')):
+        if os.path.exists(os.path.join(persist_directory, 'chroma-collections.parquet')) and os.path.exists(os.path.join(persist_directory, 'chroma-embeddings.parquet')):
+            list_index_files = glob.glob(os.path.join(persist_directory, 'index/*.bin'))
+            list_index_files += glob.glob(os.path.join(persist_directory, 'index/*.pkl'))
+            # A working vectorstore has more than three index files
+            if len(list_index_files) > 3:
+                return True
+    return False
+
+def main():
+    # Create the embedding function used for both new and existing vectorstores
+    embeddings = HuggingFaceEmbeddings(model_name=embeddings_model_name)
+
+    if does_vectorstore_exist(persist_directory):
+        # Update the existing local vectorstore, skipping documents already ingested
+        print(f"Appending to existing vectorstore at {persist_directory}")
+        db = Chroma(persist_directory=persist_directory, embedding_function=embeddings, client_settings=CHROMA_SETTINGS)
+        collection = db.get()
+        texts = process_documents([metadata['source'] for metadata in collection['metadatas']])
+        print("Creating embeddings. May take some minutes...")
+        db.add_documents(texts)
+    else:
+        # Create and persist a new local vectorstore
+        print("Creating new vectorstore")
+        texts = process_documents()
+        print("Creating embeddings. May take some minutes...")
+        db = Chroma.from_documents(texts, embeddings, persist_directory=persist_directory)
+        db.persist()
+        db = None  # Release the reference to the Chroma client
+
+    print("Ingestion complete! You can now run privateGPT.py to query your documents")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/ollama/examples/langchain-python-rag-privategpt/poetry.lock b/ollama/examples/langchain-python-rag-privategpt/poetry.lock
new file mode 100755
index 0000000000000000000000000000000000000000..f02b1c5b9e672b43f646769853dc141cf7d055b0
--- /dev/null
+++ b/ollama/examples/langchain-python-rag-privategpt/poetry.lock
@@ -0,0 +1,3833 @@
+# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+
+[[package]]
+name = "aiohttp"
+version = "3.8.4"
+description = "Async http client/server framework (asyncio)"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5ce45967538fb747370308d3145aa68a074bdecb4f3a300869590f725ced69c1"},
+ {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b744c33b6f14ca26b7544e8d8aadff6b765a80ad6164fb1a430bbadd593dfb1a"},
+ {file = "aiohttp-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a45865451439eb320784918617ba54b7a377e3501fb70402ab84d38c2cd891b"},
+ {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a86d42d7cba1cec432d47ab13b6637bee393a10f664c425ea7b305d1301ca1a3"},
+ {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee3c36df21b5714d49fc4580247947aa64bcbe2939d1b77b4c8dcb8f6c9faecc"},
+ {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:176a64b24c0935869d5bbc4c96e82f89f643bcdf08ec947701b9dbb3c956b7dd"},
+ {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c844fd628851c0bc309f3c801b3a3d58ce430b2ce5b359cd918a5a76d0b20cb5"},
+ {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5393fb786a9e23e4799fec788e7e735de18052f83682ce2dfcabaf1c00c2c08e"},
+ {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e4b09863aae0dc965c3ef36500d891a3ff495a2ea9ae9171e4519963c12ceefd"},
+ {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:adfbc22e87365a6e564c804c58fc44ff7727deea782d175c33602737b7feadb6"},
+ {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:147ae376f14b55f4f3c2b118b95be50a369b89b38a971e80a17c3fd623f280c9"},
+ {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:eafb3e874816ebe2a92f5e155f17260034c8c341dad1df25672fb710627c6949"},
+ {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6cc15d58053c76eacac5fa9152d7d84b8d67b3fde92709195cb984cfb3475ea"},
+ {file = "aiohttp-3.8.4-cp310-cp310-win32.whl", hash = "sha256:59f029a5f6e2d679296db7bee982bb3d20c088e52a2977e3175faf31d6fb75d1"},
+ {file = "aiohttp-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:fe7ba4a51f33ab275515f66b0a236bcde4fb5561498fe8f898d4e549b2e4509f"},
+ {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d8ef1a630519a26d6760bc695842579cb09e373c5f227a21b67dc3eb16cfea4"},
+ {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b3f2e06a512e94722886c0827bee9807c86a9f698fac6b3aee841fab49bbfb4"},
+ {file = "aiohttp-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a80464982d41b1fbfe3154e440ba4904b71c1a53e9cd584098cd41efdb188ef"},
+ {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b631e26df63e52f7cce0cce6507b7a7f1bc9b0c501fcde69742130b32e8782f"},
+ {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f43255086fe25e36fd5ed8f2ee47477408a73ef00e804cb2b5cba4bf2ac7f5e"},
+ {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d347a172f866cd1d93126d9b239fcbe682acb39b48ee0873c73c933dd23bd0f"},
+ {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3fec6a4cb5551721cdd70473eb009d90935b4063acc5f40905d40ecfea23e05"},
+ {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80a37fe8f7c1e6ce8f2d9c411676e4bc633a8462844e38f46156d07a7d401654"},
+ {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d1e6a862b76f34395a985b3cd39a0d949ca80a70b6ebdea37d3ab39ceea6698a"},
+ {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cd468460eefef601ece4428d3cf4562459157c0f6523db89365202c31b6daebb"},
+ {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:618c901dd3aad4ace71dfa0f5e82e88b46ef57e3239fc7027773cb6d4ed53531"},
+ {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:652b1bff4f15f6287550b4670546a2947f2a4575b6c6dff7760eafb22eacbf0b"},
+ {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80575ba9377c5171407a06d0196b2310b679dc752d02a1fcaa2bc20b235dbf24"},
+ {file = "aiohttp-3.8.4-cp311-cp311-win32.whl", hash = "sha256:bbcf1a76cf6f6dacf2c7f4d2ebd411438c275faa1dc0c68e46eb84eebd05dd7d"},
+ {file = "aiohttp-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:6e74dd54f7239fcffe07913ff8b964e28b712f09846e20de78676ce2a3dc0bfc"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:880e15bb6dad90549b43f796b391cfffd7af373f4646784795e20d92606b7a51"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb96fa6b56bb536c42d6a4a87dfca570ff8e52de2d63cabebfd6fb67049c34b6"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a6cadebe132e90cefa77e45f2d2f1a4b2ce5c6b1bfc1656c1ddafcfe4ba8131"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f352b62b45dff37b55ddd7b9c0c8672c4dd2eb9c0f9c11d395075a84e2c40f75"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ab43061a0c81198d88f39aaf90dae9a7744620978f7ef3e3708339b8ed2ef01"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9cb1565a7ad52e096a6988e2ee0397f72fe056dadf75d17fa6b5aebaea05622"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:1b3ea7edd2d24538959c1c1abf97c744d879d4e541d38305f9bd7d9b10c9ec41"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7c7837fe8037e96b6dd5cfcf47263c1620a9d332a87ec06a6ca4564e56bd0f36"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:3b90467ebc3d9fa5b0f9b6489dfb2c304a1db7b9946fa92aa76a831b9d587e99"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:cab9401de3ea52b4b4c6971db5fb5c999bd4260898af972bf23de1c6b5dd9d71"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d1f9282c5f2b5e241034a009779e7b2a1aa045f667ff521e7948ea9b56e0c5ff"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-win32.whl", hash = "sha256:5e14f25765a578a0a634d5f0cd1e2c3f53964553a00347998dfdf96b8137f777"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4c745b109057e7e5f1848c689ee4fb3a016c8d4d92da52b312f8a509f83aa05e"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:aede4df4eeb926c8fa70de46c340a1bc2c6079e1c40ccf7b0eae1313ffd33519"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ddaae3f3d32fc2cb4c53fab020b69a05c8ab1f02e0e59665c6f7a0d3a5be54f"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4eb3b82ca349cf6fadcdc7abcc8b3a50ab74a62e9113ab7a8ebc268aad35bb9"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bcb89336efa095ea21b30f9e686763f2be4478f1b0a616969551982c4ee4c3b"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c08e8ed6fa3d477e501ec9db169bfac8140e830aa372d77e4a43084d8dd91ab"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c6cd05ea06daca6ad6a4ca3ba7fe7dc5b5de063ff4daec6170ec0f9979f6c332"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7a00a9ed8d6e725b55ef98b1b35c88013245f35f68b1b12c5cd4100dddac333"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:de04b491d0e5007ee1b63a309956eaed959a49f5bb4e84b26c8f5d49de140fa9"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:40653609b3bf50611356e6b6554e3a331f6879fa7116f3959b20e3528783e699"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dbf3a08a06b3f433013c143ebd72c15cac33d2914b8ea4bea7ac2c23578815d6"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854f422ac44af92bfe172d8e73229c270dc09b96535e8a548f99c84f82dde241"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-win32.whl", hash = "sha256:aeb29c84bb53a84b1a81c6c09d24cf33bb8432cc5c39979021cc0f98c1292a1a"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-win_amd64.whl", hash = "sha256:db3fc6120bce9f446d13b1b834ea5b15341ca9ff3f335e4a951a6ead31105480"},
+ {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fabb87dd8850ef0f7fe2b366d44b77d7e6fa2ea87861ab3844da99291e81e60f"},
+ {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91f6d540163f90bbaef9387e65f18f73ffd7c79f5225ac3d3f61df7b0d01ad15"},
+ {file = "aiohttp-3.8.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d265f09a75a79a788237d7f9054f929ced2e69eb0bb79de3798c468d8a90f945"},
+ {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d89efa095ca7d442a6d0cbc755f9e08190ba40069b235c9886a8763b03785da"},
+ {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dac314662f4e2aa5009977b652d9b8db7121b46c38f2073bfeed9f4049732cd"},
+ {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe11310ae1e4cd560035598c3f29d86cef39a83d244c7466f95c27ae04850f10"},
+ {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ddb2a2026c3f6a68c3998a6c47ab6795e4127315d2e35a09997da21865757f8"},
+ {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e75b89ac3bd27d2d043b234aa7b734c38ba1b0e43f07787130a0ecac1e12228a"},
+ {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6e601588f2b502c93c30cd5a45bfc665faaf37bbe835b7cfd461753068232074"},
+ {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a5d794d1ae64e7753e405ba58e08fcfa73e3fad93ef9b7e31112ef3c9a0efb52"},
+ {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a1f4689c9a1462f3df0a1f7e797791cd6b124ddbee2b570d34e7f38ade0e2c71"},
+ {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3032dcb1c35bc330134a5b8a5d4f68c1a87252dfc6e1262c65a7e30e62298275"},
+ {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8189c56eb0ddbb95bfadb8f60ea1b22fcfa659396ea36f6adcc521213cd7b44d"},
+ {file = "aiohttp-3.8.4-cp38-cp38-win32.whl", hash = "sha256:33587f26dcee66efb2fff3c177547bd0449ab7edf1b73a7f5dea1e38609a0c54"},
+ {file = "aiohttp-3.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:e595432ac259af2d4630008bf638873d69346372d38255774c0e286951e8b79f"},
+ {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5a7bdf9e57126dc345b683c3632e8ba317c31d2a41acd5800c10640387d193ed"},
+ {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:22f6eab15b6db242499a16de87939a342f5a950ad0abaf1532038e2ce7d31567"},
+ {file = "aiohttp-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7235604476a76ef249bd64cb8274ed24ccf6995c4a8b51a237005ee7a57e8643"},
+ {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea9eb976ffdd79d0e893869cfe179a8f60f152d42cb64622fca418cd9b18dc2a"},
+ {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92c0cea74a2a81c4c76b62ea1cac163ecb20fb3ba3a75c909b9fa71b4ad493cf"},
+ {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493f5bc2f8307286b7799c6d899d388bbaa7dfa6c4caf4f97ef7521b9cb13719"},
+ {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a63f03189a6fa7c900226e3ef5ba4d3bd047e18f445e69adbd65af433add5a2"},
+ {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10c8cefcff98fd9168cdd86c4da8b84baaa90bf2da2269c6161984e6737bf23e"},
+ {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bca5f24726e2919de94f047739d0a4fc01372801a3672708260546aa2601bf57"},
+ {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:03baa76b730e4e15a45f81dfe29a8d910314143414e528737f8589ec60cf7391"},
+ {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8c29c77cc57e40f84acef9bfb904373a4e89a4e8b74e71aa8075c021ec9078c2"},
+ {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:03543dcf98a6619254b409be2d22b51f21ec66272be4ebda7b04e6412e4b2e14"},
+ {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17b79c2963db82086229012cff93ea55196ed31f6493bb1ccd2c62f1724324e4"},
+ {file = "aiohttp-3.8.4-cp39-cp39-win32.whl", hash = "sha256:34ce9f93a4a68d1272d26030655dd1b58ff727b3ed2a33d80ec433561b03d67a"},
+ {file = "aiohttp-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:41a86a69bb63bb2fc3dc9ad5ea9f10f1c9c8e282b471931be0268ddd09430b04"},
+ {file = "aiohttp-3.8.4.tar.gz", hash = "sha256:bf2e1a9162c1e441bf805a1fd166e249d574ca04e03b34f97e2928769e91ab5c"},
+]
+
+[package.dependencies]
+aiosignal = ">=1.1.2"
+async-timeout = ">=4.0.0a3,<5.0"
+attrs = ">=17.3.0"
+charset-normalizer = ">=2.0,<4.0"
+frozenlist = ">=1.1.1"
+multidict = ">=4.5,<7.0"
+yarl = ">=1.0,<2.0"
+
+[package.extras]
+speedups = ["Brotli", "aiodns", "cchardet"]
+
+[[package]]
+name = "aiosignal"
+version = "1.3.1"
+description = "aiosignal: a list of registered asynchronous callbacks"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
+ {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
+]
+
+[package.dependencies]
+frozenlist = ">=1.1.0"
+
+[[package]]
+name = "anyio"
+version = "3.7.1"
+description = "High level compatibility layer for multiple asynchronous event loop implementations"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"},
+ {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"},
+]
+
+[package.dependencies]
+exceptiongroup = {version = "*", markers = "python_version < \"3.11\""}
+idna = ">=2.8"
+sniffio = ">=1.1"
+
+[package.extras]
+doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"]
+test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
+trio = ["trio (<0.22)"]
+
+[[package]]
+name = "argilla"
+version = "0.0.1"
+description = ""
+optional = false
+python-versions = "*"
+files = [
+ {file = "argilla-0.0.1-py3-none-any.whl", hash = "sha256:8bdc3c505bcfb47ba4b91f5658034eae53bf7d4f9317980397605c0c55817396"},
+ {file = "argilla-0.0.1.tar.gz", hash = "sha256:5017854754e89f573b31af25b25b803f51cea9ca1fa0bcf00505dee1f45cf7c9"},
+]
+
+[[package]]
+name = "async-timeout"
+version = "4.0.2"
+description = "Timeout context manager for asyncio programs"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
+ {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"},
+]
+
+[[package]]
+name = "attrs"
+version = "23.1.0"
+description = "Classes Without Boilerplate"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"},
+ {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
+]
+
+[package.extras]
+cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
+dev = ["attrs[docs,tests]", "pre-commit"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
+tests = ["attrs[tests-no-zope]", "zope-interface"]
+tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+
+[[package]]
+name = "backoff"
+version = "2.2.1"
+description = "Function decoration for backoff and retry"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+ {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"},
+ {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
+]
+
+[[package]]
+name = "beautifulsoup4"
+version = "4.12.2"
+description = "Screen-scraping library"
+optional = false
+python-versions = ">=3.6.0"
+files = [
+ {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"},
+ {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"},
+]
+
+[package.dependencies]
+soupsieve = ">1.2"
+
+[package.extras]
+html5lib = ["html5lib"]
+lxml = ["lxml"]
+
+[[package]]
+name = "certifi"
+version = "2023.5.7"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"},
+ {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"},
+]
+
+[[package]]
+name = "cffi"
+version = "1.15.1"
+description = "Foreign Function Interface for Python calling C code."
+optional = false
+python-versions = "*"
+files = [
+ {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
+ {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"},
+ {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"},
+ {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"},
+ {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"},
+ {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"},
+ {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"},
+ {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"},
+ {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"},
+ {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"},
+ {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"},
+ {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"},
+ {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"},
+ {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"},
+ {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"},
+ {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"},
+ {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"},
+ {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"},
+ {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"},
+ {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"},
+ {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"},
+ {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"},
+ {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"},
+ {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"},
+ {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"},
+ {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"},
+ {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"},
+ {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"},
+ {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"},
+ {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"},
+ {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"},
+ {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"},
+ {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"},
+ {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"},
+ {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"},
+]
+
+[package.dependencies]
+pycparser = "*"
+
+[[package]]
+name = "chardet"
+version = "5.1.0"
+description = "Universal encoding detector for Python 3"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9"},
+ {file = "chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.2.0"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"},
+ {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"},
+]
+
+[[package]]
+name = "chromadb"
+version = "0.3.26"
+description = "Chroma."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "chromadb-0.3.26-py3-none-any.whl", hash = "sha256:45a7848ee3ed8b694ca5789e5fd723406b76a13fa46f9a9a769f93317f29894c"},
+ {file = "chromadb-0.3.26.tar.gz", hash = "sha256:a9b596d507f081993f2e32a7dcacabbbec2f6aebc2b6defe524442b07e265296"},
+]
+
+[package.dependencies]
+clickhouse-connect = ">=0.5.7"
+duckdb = ">=0.7.1"
+fastapi = ">=0.85.1"
+hnswlib = ">=0.7"
+numpy = ">=1.21.6"
+onnxruntime = ">=1.14.1"
+overrides = ">=7.3.1"
+pandas = ">=1.3"
+posthog = ">=2.4.0"
+pulsar-client = ">=3.1.0"
+pydantic = ">=1.9"
+requests = ">=2.28"
+tokenizers = ">=0.13.2"
+tqdm = ">=4.65.0"
+typing-extensions = ">=4.5.0"
+uvicorn = {version = ">=0.18.3", extras = ["standard"]}
+
+[[package]]
+name = "click"
+version = "8.1.4"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "click-8.1.4-py3-none-any.whl", hash = "sha256:2739815aaa5d2c986a88f1e9230c55e17f0caad3d958a5e13ad0797c166db9e3"},
+ {file = "click-8.1.4.tar.gz", hash = "sha256:b97d0c74955da062a7d4ef92fadb583806a585b2ea81958a81bd72726cbb8e37"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "clickhouse-connect"
+version = "0.6.6"
+description = "ClickHouse Database Core Driver for Python, Pandas, and Superset"
+optional = false
+python-versions = "~=3.7"
+files = [
+ {file = "clickhouse-connect-0.6.6.tar.gz", hash = "sha256:28d261b95fe9818f4d8bc4ad48087cbff3c9f0b6574ff04d234ed5bca6619474"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:31187a9947f5771c9e2a4c5d5c33d8c42f1c0f83b1223277c8faf47da0fcd1dc"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1e1713d1f9f294c0cf05ded6f7eff227dde2b19f0d19423fbbeb05fbf5d7c484"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:961c463de6f0de93fc11f1c1f81efc1ec5b5895481cfdf79b3f832e0e242e7e1"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18900f1a13b3b120252fc3583ca1e0fc4d3a33ea98fcf63d33d168a469561056"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4cbbea1a943e742ea649c82f85109b9a9928e61b038923de2813977966acd76"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2714ab61f063a65419278b97f8785ce2440fdb1ef46d9a6703cef9cd38517521"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:823756569f6bea58ff9286cf494abaca5db8652e33ee4a6e7ecb40efbf945088"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11aff145aacfae92b941b95ec5943fb62ea241ec2225b8ecefc4cadadf699893"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-win32.whl", hash = "sha256:4f5f9e3dcece211dc711088a5b264e66e8198b878bdf99619a3a7c54976c118d"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-win_amd64.whl", hash = "sha256:8268927ef8d476ef4c81d9562d049f38bc534c4d1d441e072cf8428f08ff6eaa"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5f9cb2ebe0deaa78c942888aad32fa42beb4e75c2377e8784baf3d737c23e5f1"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d502b7f35008facf2774f411eed6b35010923acaac254a8c5683fdf8a11abd62"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87e0f2afe464be0947947d98482eb12b25be8857ae1a31c1aaa17a67f616174d"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69f2c517943eeb7663a9d42bd9b737b8ec5513ddcf58f2372f8b2074a315bae2"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa6c2b488cf9558c2b71a2599d812fe4368d5199edaa011731a8bc7bfe019751"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:df9e80d0b3f5614d38026e7e2e7e7412dec942df8d765c082177879b37e678e2"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a20351fb2ae47aac1ae9b1de0585949616baedd6dbdee5272f466a2aea6ec4dd"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af40eaa20998d96198563748a6fd9796843b6f22e9e95b2136aabd917db33fff"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-win32.whl", hash = "sha256:9591a9bfa58ace467544227f83226b22a1554e2db4cfcf658f25f43c9d94e960"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b6f6159f8eddb0cad4d7e0cbad5944e97e0146ee9f416fc663f7bd3d4e9ea46"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8b941c85fe9ddd5e5edf6fc7458563d9e51ad900d95fe0b87b0458be166693a1"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c642696a758fa726c86ca624dd40acded100d79a9f4bd9f5b56ba0ea4dc44099"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57b6b36b316451c1bdc4450f9418c017af84af57d52d03cd4deb85480819a934"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17cfb1d103b47350c3ba824641fb5ba730e6e29274077a6f8975a3394a1abadb"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d52c7e7560666b93c078bf082e4ed87689fd283e6295a6d8d1dd491d4d7b6072"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0a6d498b689aa09e9d1b0051480a04ecc3509002f54bfb82998d030b4675bb24"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:28c876f7a4713662af2ded7350a0262756ec4da9262bb76cc85cfe2e88015b74"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-win32.whl", hash = "sha256:74bf0a95c7c5644948be0ba9c0abcad7615b806fd2545501862526dbe684db71"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-win_amd64.whl", hash = "sha256:0aaa4194d11cb7513de69b791911ff60b3ad8b86f125446a37347208e9b9ae6d"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3b873d138dfedbe761f2d66ad1257ea253394c4f8dcffd6ff34dfb990f13a18b"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7140705d05a05ac39eecf86727ab55985e5dba9d1734df8921cc417853a18b7f"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69085fa0f4e5da5cef4ae5249e19f10d91e57ae78628e49e8853b71b6003dbae"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e6ec081d87cc37be3ecf60b88002c58add76a72b4124525cb5cd28539e7d488"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe04eb239b72bc9fa4f1999cd292f82af507cbe1f07546f26a3332c50a294b"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:244bbf7ad92f1f030378412358c47cd377aa6d469b548dba2406a7894c8da2ab"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:69e91bdb25166b6fa4eb55601d86fa57dee82070bce9b97a858c8973615ab8b8"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d2627c8a9625e1c9058cfb5b231a0d0180ed9215d901b601d367de598f27a90d"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-win32.whl", hash = "sha256:87fb937b34b561703eaba5781404736120bab691f4525096d5dfb4b99d4890a6"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-win_amd64.whl", hash = "sha256:366c5765e6b7863b3a8d565d5a3b27f9f8731f6f4b016048fa172c6ad6485594"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c1b0d8bee6399f5b68bb0832fae51fd0f5e4bcb539bae2df36d8433b6e38a0b"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3f7e3ead1429ec82b9cd0cf7b807bacf69d895042f75276f63d732378344376"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36df02ebfbfa4dbe3667bf5b3402ff0193d0f682b9aa09d71469c15745473d8e"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa636b0cbbff52c9fafe287d1d818fc9947feaa840c951b8bfd8f8d4d1ee45a0"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4968b6b48baae43d62c241bee9e1c8f680ee3d054254e3959c2d2fb7d370ee"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a4156de52fe1f9b19f8c3a820d57c012a55644c56a87c8d31ecff89115959d60"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fccbe34878e6202ff5715284cbe57e748d36f4c8ad6217f9c80f84a086013fb9"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:70bfe48c0e4340ccf234b691fbd52f32db74649cb84ca28b98a211cc3e30b30c"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-win32.whl", hash = "sha256:9f80b64e2268293a918721e1c122c54e2a1592bb74824fdd70e9add9fbcea31a"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-win_amd64.whl", hash = "sha256:04a5030b76ee930b18eb3aeb7847146c2fa29da0feb0ec7dd3a0564a3de944f1"},
+ {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:75e84c827c8180d5dc66b0e99dba422a3ffd2c7d8ee5ba80e00b9c942dff8a36"},
+ {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e627061336142d02e9c900a96bcd87372e88f05755bf19b158e68472b99a921"},
+ {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:194f72e8f3f24c207aa87113b8d11674dab12b35232fd8b7b19b97257796be45"},
+ {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf755b46089ee6a7f1ab3e24fc6fbacefc54cfefceb0ed81ebf198abf6937dac"},
+ {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:39e58756a13872a24304b1987fafb7d5112ea88469eb55303b1183ebdd7a0be5"},
+ {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1e29de1264ffa26eb822e57c5715974c9818ae8e16bb114e54352d66947cdf7f"},
+ {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a74ed74427aaf10d2e8f7697b8ec53479f6068287ea695a5f3d3927db40be3c3"},
+ {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abc910b0f6c93d0d703809fd92cf19b71dcaf8c6d5f328deddae1709061a0aa2"},
+ {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23b17236e08da8b5d737ccd983db56a2d2222955a49c4b312b12e4a2b4a06c9b"},
+ {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d4d76560d0ce84d0ba550918433dd1f8da6983edabe2685cd84679cd7a90c179"},
+ {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:815bd0d5f40174716ffdf1adab066cd0e36c82c81b227224fb7281bdf8734eb6"},
+ {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82abd319ba51e0c5c2d123e2cf30b1604b0d46f4de694096aa911ddd63701f60"},
+ {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa3eea5dac3a7cd52523b556ecd05940c4710c96b6e39ec5a05ed7859bddc7f6"},
+ {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bbc28cdf903b4b2805199ce7d4580814a8b9bb4766ddd835cab46a81e6fcd63"},
+ {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5fc4deda5a97e672135b4330d81109b443266aa948b09a24a02db58c0fc96bc1"},
+]
+
+[package.dependencies]
+certifi = "*"
+importlib-metadata = "*"
+lz4 = "*"
+pytz = "*"
+urllib3 = ">=1.26"
+zstandard = "*"
+
+[package.extras]
+arrow = ["pyarrow"]
+numpy = ["numpy"]
+orjson = ["orjson"]
+pandas = ["pandas"]
+sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "colorclass"
+version = "2.2.2"
+description = "Colorful worry-free console applications for Linux, Mac OS X, and Windows."
+optional = false
+python-versions = ">=2.6"
+files = [
+ {file = "colorclass-2.2.2-py2.py3-none-any.whl", hash = "sha256:6f10c273a0ef7a1150b1120b6095cbdd68e5cf36dfd5d0fc957a2500bbf99a55"},
+ {file = "colorclass-2.2.2.tar.gz", hash = "sha256:6d4fe287766166a98ca7bc6f6312daf04a0481b1eda43e7173484051c0ab4366"},
+]
+
+[[package]]
+name = "coloredlogs"
+version = "15.0.1"
+description = "Colored terminal output for Python's logging module"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"},
+ {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"},
+]
+
+[package.dependencies]
+humanfriendly = ">=9.1"
+
+[package.extras]
+cron = ["capturer (>=2.4)"]
+
+[[package]]
+name = "compressed-rtf"
+version = "1.0.6"
+description = "Compressed Rich Text Format (RTF) compression and decompression package"
+optional = false
+python-versions = "*"
+files = [
+ {file = "compressed_rtf-1.0.6.tar.gz", hash = "sha256:c1c827f1d124d24608981a56e8b8691eb1f2a69a78ccad6440e7d92fde1781dd"},
+]
+
+[[package]]
+name = "cryptography"
+version = "41.0.1"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699"},
+ {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a"},
+ {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca"},
+ {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43"},
+ {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b"},
+ {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3"},
+ {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db"},
+ {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31"},
+ {file = "cryptography-41.0.1-cp37-abi3-win32.whl", hash = "sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5"},
+ {file = "cryptography-41.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c"},
+ {file = "cryptography-41.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb"},
+ {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3"},
+ {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039"},
+ {file = "cryptography-41.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc"},
+ {file = "cryptography-41.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485"},
+ {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c"},
+ {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a"},
+ {file = "cryptography-41.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5"},
+ {file = "cryptography-41.0.1.tar.gz", hash = "sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006"},
+]
+
+[package.dependencies]
+cffi = ">=1.12"
+
+[package.extras]
+docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
+docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
+nox = ["nox"]
+pep8test = ["black", "check-sdist", "mypy", "ruff"]
+sdist = ["build"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
+test-randomorder = ["pytest-randomly"]
+
+[[package]]
+name = "dataclasses-json"
+version = "0.5.9"
+description = "Easily serialize dataclasses to and from JSON"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "dataclasses-json-0.5.9.tar.gz", hash = "sha256:e9ac87b73edc0141aafbce02b44e93553c3123ad574958f0fe52a534b6707e8e"},
+ {file = "dataclasses_json-0.5.9-py3-none-any.whl", hash = "sha256:1280542631df1c375b7bc92e5b86d39e06c44760d7e3571a537b3b8acabf2f0c"},
+]
+
+[package.dependencies]
+marshmallow = ">=3.3.0,<4.0.0"
+marshmallow-enum = ">=1.5.1,<2.0.0"
+typing-inspect = ">=0.4.0"
+
+[package.extras]
+dev = ["flake8", "hypothesis", "ipython", "mypy (>=0.710)", "portray", "pytest (>=7.2.0)", "setuptools", "simplejson", "twine", "types-dataclasses", "wheel"]
+
+[[package]]
+name = "duckdb"
+version = "0.8.1"
+description = "DuckDB embedded database"
+optional = false
+python-versions = "*"
+files = [
+ {file = "duckdb-0.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:14781d21580ee72aba1f5dcae7734674c9b6c078dd60470a08b2b420d15b996d"},
+ {file = "duckdb-0.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f13bf7ab0e56ddd2014ef762ae4ee5ea4df5a69545ce1191b8d7df8118ba3167"},
+ {file = "duckdb-0.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4032042d8363e55365bbca3faafc6dc336ed2aad088f10ae1a534ebc5bcc181"},
+ {file = "duckdb-0.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a71bd8f0b0ca77c27fa89b99349ef22599ffefe1e7684ae2e1aa2904a08684"},
+ {file = "duckdb-0.8.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24568d6e48f3dbbf4a933109e323507a46b9399ed24c5d4388c4987ddc694fd0"},
+ {file = "duckdb-0.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297226c0dadaa07f7c5ae7cbdb9adba9567db7b16693dbd1b406b739ce0d7924"},
+ {file = "duckdb-0.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5792cf777ece2c0591194006b4d3e531f720186102492872cb32ddb9363919cf"},
+ {file = "duckdb-0.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:12803f9f41582b68921d6b21f95ba7a51e1d8f36832b7d8006186f58c3d1b344"},
+ {file = "duckdb-0.8.1-cp310-cp310-win32.whl", hash = "sha256:d0953d5a2355ddc49095e7aef1392b7f59c5be5cec8cdc98b9d9dc1f01e7ce2b"},
+ {file = "duckdb-0.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:6e6583c98a7d6637e83bcadfbd86e1f183917ea539f23b6b41178f32f813a5eb"},
+ {file = "duckdb-0.8.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fad7ed0d4415f633d955ac24717fa13a500012b600751d4edb050b75fb940c25"},
+ {file = "duckdb-0.8.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81ae602f34d38d9c48dd60f94b89f28df3ef346830978441b83c5b4eae131d08"},
+ {file = "duckdb-0.8.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7d75cfe563aaa058d3b4ccaaa371c6271e00e3070df5de72361fd161b2fe6780"},
+ {file = "duckdb-0.8.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dbb55e7a3336f2462e5e916fc128c47fe1c03b6208d6bd413ac11ed95132aa0"},
+ {file = "duckdb-0.8.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6df53efd63b6fdf04657385a791a4e3c4fb94bfd5db181c4843e2c46b04fef5"},
+ {file = "duckdb-0.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b188b80b70d1159b17c9baaf541c1799c1ce8b2af4add179a9eed8e2616be96"},
+ {file = "duckdb-0.8.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5ad481ee353f31250b45d64b4a104e53b21415577943aa8f84d0af266dc9af85"},
+ {file = "duckdb-0.8.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1d1b1729993611b1892509d21c21628917625cdbe824a61ce891baadf684b32"},
+ {file = "duckdb-0.8.1-cp311-cp311-win32.whl", hash = "sha256:2d8f9cc301e8455a4f89aa1088b8a2d628f0c1f158d4cf9bc78971ed88d82eea"},
+ {file = "duckdb-0.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:07457a43605223f62d93d2a5a66b3f97731f79bbbe81fdd5b79954306122f612"},
+ {file = "duckdb-0.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d2c8062c3e978dbcd80d712ca3e307de8a06bd4f343aa457d7dd7294692a3842"},
+ {file = "duckdb-0.8.1-cp36-cp36m-win32.whl", hash = "sha256:fad486c65ae944eae2de0d590a0a4fb91a9893df98411d66cab03359f9cba39b"},
+ {file = "duckdb-0.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:86fa4506622c52d2df93089c8e7075f1c4d0ba56f4bf27faebde8725355edf32"},
+ {file = "duckdb-0.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:60e07a62782f88420046e30cc0e3de842d0901c4fd5b8e4d28b73826ec0c3f5e"},
+ {file = "duckdb-0.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f18563675977f8cbf03748efee0165b4c8ef64e0cbe48366f78e2914d82138bb"},
+ {file = "duckdb-0.8.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16e179443832bea8439ae4dff93cf1e42c545144ead7a4ef5f473e373eea925a"},
+ {file = "duckdb-0.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a413d5267cb41a1afe69d30dd6d4842c588256a6fed7554c7e07dad251ede095"},
+ {file = "duckdb-0.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3784680df59eadd683b0a4c2375d451a64470ca54bd171c01e36951962b1d332"},
+ {file = "duckdb-0.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:67a1725c2b01f9b53571ecf3f92959b652f60156c1c48fb35798302e39b3c1a2"},
+ {file = "duckdb-0.8.1-cp37-cp37m-win32.whl", hash = "sha256:197d37e2588c5ad063e79819054eedb7550d43bf1a557d03ba8f8f67f71acc42"},
+ {file = "duckdb-0.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:3843feb79edf100800f5037c32d5d5a5474fb94b32ace66c707b96605e7c16b2"},
+ {file = "duckdb-0.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:624c889b0f2d656794757b3cc4fc58030d5e285f5ad2ef9fba1ea34a01dab7fb"},
+ {file = "duckdb-0.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fcbe3742d77eb5add2d617d487266d825e663270ef90253366137a47eaab9448"},
+ {file = "duckdb-0.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47516c9299d09e9dbba097b9fb339b389313c4941da5c54109df01df0f05e78c"},
+ {file = "duckdb-0.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf1ba718b7522d34399446ebd5d4b9fcac0b56b6ac07bfebf618fd190ec37c1d"},
+ {file = "duckdb-0.8.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e36e35d38a9ae798fe8cf6a839e81494d5b634af89f4ec9483f4d0a313fc6bdb"},
+ {file = "duckdb-0.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23493313f88ce6e708a512daacad13e83e6d1ea0be204b175df1348f7fc78671"},
+ {file = "duckdb-0.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1fb9bf0b6f63616c8a4b9a6a32789045e98c108df100e6bac783dc1e36073737"},
+ {file = "duckdb-0.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:12fc13ecd5eddd28b203b9e3999040d3a7374a8f4b833b04bd26b8c5685c2635"},
+ {file = "duckdb-0.8.1-cp38-cp38-win32.whl", hash = "sha256:a12bf4b18306c9cb2c9ba50520317e6cf2de861f121d6f0678505fa83468c627"},
+ {file = "duckdb-0.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:e4e809358b9559c00caac4233e0e2014f3f55cd753a31c4bcbbd1b55ad0d35e4"},
+ {file = "duckdb-0.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7acedfc00d97fbdb8c3d120418c41ef3cb86ef59367f3a9a30dff24470d38680"},
+ {file = "duckdb-0.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:99bfe264059cdc1e318769103f656f98e819cd4e231cd76c1d1a0327f3e5cef8"},
+ {file = "duckdb-0.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:538b225f361066231bc6cd66c04a5561de3eea56115a5dd773e99e5d47eb1b89"},
+ {file = "duckdb-0.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae0be3f71a18cd8492d05d0fc1bc67d01d5a9457b04822d025b0fc8ee6efe32e"},
+ {file = "duckdb-0.8.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd82ba63b58672e46c8ec60bc9946aa4dd7b77f21c1ba09633d8847ad9eb0d7b"},
+ {file = "duckdb-0.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:780a34559aaec8354e83aa4b7b31b3555f1b2cf75728bf5ce11b89a950f5cdd9"},
+ {file = "duckdb-0.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:01f0d4e9f7103523672bda8d3f77f440b3e0155dd3b2f24997bc0c77f8deb460"},
+ {file = "duckdb-0.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:31f692decb98c2d57891da27180201d9e93bb470a3051fcf413e8da65bca37a5"},
+ {file = "duckdb-0.8.1-cp39-cp39-win32.whl", hash = "sha256:e7fe93449cd309bbc67d1bf6f6392a6118e94a9a4479ab8a80518742e855370a"},
+ {file = "duckdb-0.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:81d670bc6807672f038332d9bf587037aabdd741b0810de191984325ed307abd"},
+ {file = "duckdb-0.8.1.tar.gz", hash = "sha256:a54d37f4abc2afc4f92314aaa56ecf215a411f40af4bffe1e86bd25e62aceee9"},
+]
+
+[[package]]
+name = "easygui"
+version = "0.98.3"
+description = "EasyGUI is a module for very simple, very easy GUI programming in Python. EasyGUI is different from other GUI generators in that EasyGUI is NOT event-driven. Instead, all GUI interactions are invoked by simple function calls."
+optional = false
+python-versions = "*"
+files = [
+ {file = "easygui-0.98.3-py2.py3-none-any.whl", hash = "sha256:33498710c68b5376b459cd3fc48d1d1f33822139eb3ed01defbc0528326da3ba"},
+ {file = "easygui-0.98.3.tar.gz", hash = "sha256:d653ff79ee1f42f63b5a090f2f98ce02335d86ad8963b3ce2661805cafe99a04"},
+]
+
+[[package]]
+name = "ebcdic"
+version = "1.1.1"
+description = "Additional EBCDIC codecs"
+optional = false
+python-versions = "*"
+files = [
+ {file = "ebcdic-1.1.1-py2.py3-none-any.whl", hash = "sha256:33b4cb729bc2d0bf46cc1847b0e5946897cb8d3f53520c5b9aa5fa98d7e735f1"},
+]
+
+[[package]]
+name = "et-xmlfile"
+version = "1.1.0"
+description = "An implementation of lxml.xmlfile for the standard library"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"},
+ {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"},
+]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.1.2"
+description = "Backport of PEP 654 (exception groups)"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"},
+ {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"},
+]
+
+[package.extras]
+test = ["pytest (>=6)"]
+
+[[package]]
+name = "extract-msg"
+version = "0.41.5"
+description = "Extracts emails and attachments saved in Microsoft Outlook's .msg files"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "extract_msg-0.41.5-py2.py3-none-any.whl", hash = "sha256:ad70dcdab3701b0fae554168c9642ad4ebef7f2ec283313c55e895a6518911e5"},
+ {file = "extract_msg-0.41.5.tar.gz", hash = "sha256:99d4fdc0c0912c836370bf9fbb6e77558bb978499c1b5fdd31634684e323885c"},
+]
+
+[package.dependencies]
+beautifulsoup4 = ">=4.11.1,<4.13"
+chardet = ">=4.0.0,<6"
+compressed-rtf = ">=1.0.6,<2"
+ebcdic = ">=1.1.1,<2"
+imapclient = ">=2.3.0,<3"
+olefile = "0.46"
+red-black-tree-mod = "1.20"
+RTFDE = "0.0.2"
+tzlocal = ">=4.2,<6"
+
+[package.extras]
+all = ["extract-msg[mime]"]
+mime = ["python-magic (>=0.4.27,<0.5)"]
+
+[[package]]
+name = "fastapi"
+version = "0.100.0"
+description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "fastapi-0.100.0-py3-none-any.whl", hash = "sha256:271662daf986da8fa98dc2b7c7f61c4abdfdccfb4786d79ed8b2878f172c6d5f"},
+ {file = "fastapi-0.100.0.tar.gz", hash = "sha256:acb5f941ea8215663283c10018323ba7ea737c571b67fc7e88e9469c7eb1d12e"},
+]
+
+[package.dependencies]
+pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<3.0.0"
+starlette = ">=0.27.0,<0.28.0"
+typing-extensions = ">=4.5.0"
+
+[package.extras]
+all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
+
+[[package]]
+name = "filelock"
+version = "3.12.2"
+description = "A platform independent file lock."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"},
+ {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"]
+
+[[package]]
+name = "filetype"
+version = "1.2.0"
+description = "Infer file type and MIME type of any file/buffer. No external dependencies."
+optional = false
+python-versions = "*"
+files = [
+ {file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"},
+ {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"},
+]
+
+[[package]]
+name = "flatbuffers"
+version = "23.5.26"
+description = "The FlatBuffers serialization format for Python"
+optional = false
+python-versions = "*"
+files = [
+ {file = "flatbuffers-23.5.26-py2.py3-none-any.whl", hash = "sha256:c0ff356da363087b915fde4b8b45bdda73432fc17cddb3c8157472eab1422ad1"},
+ {file = "flatbuffers-23.5.26.tar.gz", hash = "sha256:9ea1144cac05ce5d86e2859f431c6cd5e66cd9c78c558317c7955fb8d4c78d89"},
+]
+
+[[package]]
+name = "frozenlist"
+version = "1.3.3"
+description = "A list-like structure which implements collections.abc.MutableSequence"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"},
+ {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"},
+ {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"},
+ {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"},
+ {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"},
+ {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"},
+ {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"},
+ {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"},
+ {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"},
+ {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"},
+ {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"},
+ {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"},
+ {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"},
+ {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"},
+ {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"},
+ {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"},
+ {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"},
+ {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"},
+ {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"},
+ {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"},
+ {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"},
+]
+
+[[package]]
+name = "fsspec"
+version = "2023.6.0"
+description = "File-system specification"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "fsspec-2023.6.0-py3-none-any.whl", hash = "sha256:1cbad1faef3e391fba6dc005ae9b5bdcbf43005c9167ce78c915549c352c869a"},
+ {file = "fsspec-2023.6.0.tar.gz", hash = "sha256:d0b2f935446169753e7a5c5c55681c54ea91996cc67be93c39a154fb3a2742af"},
+]
+
+[package.extras]
+abfs = ["adlfs"]
+adl = ["adlfs"]
+arrow = ["pyarrow (>=1)"]
+dask = ["dask", "distributed"]
+devel = ["pytest", "pytest-cov"]
+dropbox = ["dropbox", "dropboxdrivefs", "requests"]
+full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"]
+fuse = ["fusepy"]
+gcs = ["gcsfs"]
+git = ["pygit2"]
+github = ["requests"]
+gs = ["gcsfs"]
+gui = ["panel"]
+hdfs = ["pyarrow (>=1)"]
+http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"]
+libarchive = ["libarchive-c"]
+oci = ["ocifs"]
+s3 = ["s3fs"]
+sftp = ["paramiko"]
+smb = ["smbprotocol"]
+ssh = ["paramiko"]
+tqdm = ["tqdm"]
+
+[[package]]
+name = "gpt4all"
+version = "1.0.3"
+description = "Python bindings for GPT4All"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "gpt4all-1.0.3-py3-none-macosx_10_9_universal2.whl", hash = "sha256:11bbc8bdb183b100b57e3e8e0c67650cd84e49d9b875dd15c8bb26cfcf72988d"},
+ {file = "gpt4all-1.0.3-py3-none-manylinux1_x86_64.whl", hash = "sha256:75248b2f160bd7834b807196395d51792cd3e41969c870a418566d1007ec5c83"},
+ {file = "gpt4all-1.0.3-py3-none-win_amd64.whl", hash = "sha256:dcb4901f6320d938bc267ee0b28eaee8676221d22ecbf31b00246d0b4973a31a"},
+]
+
+[package.dependencies]
+requests = "*"
+tqdm = "*"
+
+[package.extras]
+dev = ["black", "isort", "mkautodoc", "mkdocs-jupyter", "mkdocs-material", "mkdocstrings[python]", "pytest", "setuptools", "twine", "wheel"]
+
+[[package]]
+name = "greenlet"
+version = "2.0.2"
+description = "Lightweight in-process concurrent programming"
+optional = false
+python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
+files = [
+ {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"},
+ {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"},
+ {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"},
+ {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"},
+ {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"},
+ {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"},
+ {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"},
+ {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"},
+ {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"},
+ {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"},
+ {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"},
+ {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"},
+ {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"},
+ {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"},
+ {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"},
+ {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"},
+ {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"},
+ {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"},
+ {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"},
+ {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"},
+ {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"},
+ {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"},
+ {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"},
+ {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"},
+ {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"},
+ {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"},
+ {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"},
+ {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"},
+ {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"},
+ {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"},
+ {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"},
+ {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"},
+ {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"},
+ {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"},
+ {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"},
+ {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"},
+ {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"},
+ {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"},
+ {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"},
+ {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"},
+ {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"},
+ {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"},
+ {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"},
+ {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"},
+ {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"},
+ {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"},
+ {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"},
+ {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"},
+ {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"},
+ {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"},
+ {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"},
+ {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"},
+ {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"},
+ {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"},
+ {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"},
+ {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"},
+ {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"},
+ {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"},
+ {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"},
+ {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"},
+]
+
+[package.extras]
+docs = ["Sphinx", "docutils (<0.18)"]
+test = ["objgraph", "psutil"]
+
+[[package]]
+name = "h11"
+version = "0.14.0"
+description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
+ {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
+]
+
+[[package]]
+name = "hnswlib"
+version = "0.7.0"
+description = "hnswlib"
+optional = false
+python-versions = "*"
+files = [
+ {file = "hnswlib-0.7.0.tar.gz", hash = "sha256:bc459668e7e44bb7454b256b90c98c5af750653919d9a91698dafcf416cf64c4"},
+]
+
+[package.dependencies]
+numpy = "*"
+
+[[package]]
+name = "httptools"
+version = "0.6.0"
+description = "A collection of framework independent HTTP protocol utils."
+optional = false
+python-versions = ">=3.5.0"
+files = [
+ {file = "httptools-0.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:818325afee467d483bfab1647a72054246d29f9053fd17cc4b86cda09cc60339"},
+ {file = "httptools-0.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72205730bf1be875003692ca54a4a7c35fac77b4746008966061d9d41a61b0f5"},
+ {file = "httptools-0.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33eb1d4e609c835966e969a31b1dedf5ba16b38cab356c2ce4f3e33ffa94cad3"},
+ {file = "httptools-0.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdc6675ec6cb79d27e0575750ac6e2b47032742e24eed011b8db73f2da9ed40"},
+ {file = "httptools-0.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:463c3bc5ef64b9cf091be9ac0e0556199503f6e80456b790a917774a616aff6e"},
+ {file = "httptools-0.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82f228b88b0e8c6099a9c4757ce9fdbb8b45548074f8d0b1f0fc071e35655d1c"},
+ {file = "httptools-0.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:0781fedc610293a2716bc7fa142d4c85e6776bc59d617a807ff91246a95dea35"},
+ {file = "httptools-0.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:721e503245d591527cddd0f6fd771d156c509e831caa7a57929b55ac91ee2b51"},
+ {file = "httptools-0.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:274bf20eeb41b0956e34f6a81f84d26ed57c84dd9253f13dcb7174b27ccd8aaf"},
+ {file = "httptools-0.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:259920bbae18740a40236807915def554132ad70af5067e562f4660b62c59b90"},
+ {file = "httptools-0.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03bfd2ae8a2d532952ac54445a2fb2504c804135ed28b53fefaf03d3a93eb1fd"},
+ {file = "httptools-0.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f959e4770b3fc8ee4dbc3578fd910fab9003e093f20ac8c621452c4d62e517cb"},
+ {file = "httptools-0.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e22896b42b95b3237eccc42278cd72c0df6f23247d886b7ded3163452481e38"},
+ {file = "httptools-0.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:38f3cafedd6aa20ae05f81f2e616ea6f92116c8a0f8dcb79dc798df3356836e2"},
+ {file = "httptools-0.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47043a6e0ea753f006a9d0dd076a8f8c99bc0ecae86a0888448eb3076c43d717"},
+ {file = "httptools-0.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a541579bed0270d1ac10245a3e71e5beeb1903b5fbbc8d8b4d4e728d48ff1d"},
+ {file = "httptools-0.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65d802e7b2538a9756df5acc062300c160907b02e15ed15ba035b02bce43e89c"},
+ {file = "httptools-0.6.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:26326e0a8fe56829f3af483200d914a7cd16d8d398d14e36888b56de30bec81a"},
+ {file = "httptools-0.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e41ccac9e77cd045f3e4ee0fc62cbf3d54d7d4b375431eb855561f26ee7a9ec4"},
+ {file = "httptools-0.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4e748fc0d5c4a629988ef50ac1aef99dfb5e8996583a73a717fc2cac4ab89932"},
+ {file = "httptools-0.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cf8169e839a0d740f3d3c9c4fa630ac1a5aaf81641a34575ca6773ed7ce041a1"},
+ {file = "httptools-0.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5dcc14c090ab57b35908d4a4585ec5c0715439df07be2913405991dbb37e049d"},
+ {file = "httptools-0.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d0b0571806a5168013b8c3d180d9f9d6997365a4212cb18ea20df18b938aa0b"},
+ {file = "httptools-0.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fb4a608c631f7dcbdf986f40af7a030521a10ba6bc3d36b28c1dc9e9035a3c0"},
+ {file = "httptools-0.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:93f89975465133619aea8b1952bc6fa0e6bad22a447c6d982fc338fbb4c89649"},
+ {file = "httptools-0.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:73e9d66a5a28b2d5d9fbd9e197a31edd02be310186db423b28e6052472dc8201"},
+ {file = "httptools-0.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:22c01fcd53648162730a71c42842f73b50f989daae36534c818b3f5050b54589"},
+ {file = "httptools-0.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f96d2a351b5625a9fd9133c95744e8ca06f7a4f8f0b8231e4bbaae2c485046a"},
+ {file = "httptools-0.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72ec7c70bd9f95ef1083d14a755f321d181f046ca685b6358676737a5fecd26a"},
+ {file = "httptools-0.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b703d15dbe082cc23266bf5d9448e764c7cb3fcfe7cb358d79d3fd8248673ef9"},
+ {file = "httptools-0.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82c723ed5982f8ead00f8e7605c53e55ffe47c47465d878305ebe0082b6a1755"},
+ {file = "httptools-0.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b0a816bb425c116a160fbc6f34cece097fd22ece15059d68932af686520966bd"},
+ {file = "httptools-0.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dea66d94e5a3f68c5e9d86e0894653b87d952e624845e0b0e3ad1c733c6cc75d"},
+ {file = "httptools-0.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:23b09537086a5a611fad5696fc8963d67c7e7f98cb329d38ee114d588b0b74cd"},
+ {file = "httptools-0.6.0.tar.gz", hash = "sha256:9fc6e409ad38cbd68b177cd5158fc4042c796b82ca88d99ec78f07bed6c6b796"},
+]
+
+[package.extras]
+test = ["Cython (>=0.29.24,<0.30.0)"]
+
+[[package]]
+name = "huggingface-hub"
+version = "0.16.4"
+description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "huggingface_hub-0.16.4-py3-none-any.whl", hash = "sha256:0d3df29932f334fead024afc7cb4cc5149d955238b8b5e42dcf9740d6995a349"},
+ {file = "huggingface_hub-0.16.4.tar.gz", hash = "sha256:608c7d4f3d368b326d1747f91523dbd1f692871e8e2e7a4750314a2dd8b63e14"},
+]
+
+[package.dependencies]
+filelock = "*"
+fsspec = "*"
+packaging = ">=20.9"
+pyyaml = ">=5.1"
+requests = "*"
+tqdm = ">=4.42.1"
+typing-extensions = ">=3.7.4.3"
+
+[package.extras]
+all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "black (>=23.1,<24.0)", "gradio", "jedi", "mypy (==0.982)", "numpy", "pydantic", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.0.241)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "urllib3 (<2.0)"]
+cli = ["InquirerPy (==0.3.4)"]
+dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "black (>=23.1,<24.0)", "gradio", "jedi", "mypy (==0.982)", "numpy", "pydantic", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.0.241)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "urllib3 (<2.0)"]
+fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"]
+inference = ["aiohttp", "pydantic"]
+quality = ["black (>=23.1,<24.0)", "mypy (==0.982)", "ruff (>=0.0.241)"]
+tensorflow = ["graphviz", "pydot", "tensorflow"]
+testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"]
+torch = ["torch"]
+typing = ["pydantic", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"]
+
+[[package]]
+name = "humanfriendly"
+version = "10.0"
+description = "Human friendly output for text interfaces using Python"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"},
+ {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"},
+]
+
+[package.dependencies]
+pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""}
+
+[[package]]
+name = "idna"
+version = "3.4"
+description = "Internationalized Domain Names in Applications (IDNA)"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
+ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
+]
+
+[[package]]
+name = "imapclient"
+version = "2.3.1"
+description = "Easy-to-use, Pythonic and complete IMAP client library"
+optional = false
+python-versions = "*"
+files = [
+ {file = "IMAPClient-2.3.1-py2.py3-none-any.whl", hash = "sha256:057f28025d2987c63e065afb0e4370b0b850b539b0e1494cea0427e88130108c"},
+ {file = "IMAPClient-2.3.1.zip", hash = "sha256:26ea995664fae3a88b878ebce2aff7402931697b86658b7882043ddb01b0e6ba"},
+]
+
+[package.dependencies]
+six = "*"
+
+[package.extras]
+doc = ["sphinx"]
+test = ["mock (>=1.3.0)"]
+
+[[package]]
+name = "importlib-metadata"
+version = "6.8.0"
+description = "Read metadata from Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"},
+ {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"},
+]
+
+[package.dependencies]
+zipp = ">=0.5"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+perf = ["ipython"]
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
+
+[[package]]
+name = "jinja2"
+version = "3.1.2"
+description = "A very fast and expressive template engine."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
+ {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "joblib"
+version = "1.3.1"
+description = "Lightweight pipelining with Python functions"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "joblib-1.3.1-py3-none-any.whl", hash = "sha256:89cf0529520e01b3de7ac7b74a8102c90d16d54c64b5dd98cafcd14307fdf915"},
+ {file = "joblib-1.3.1.tar.gz", hash = "sha256:1f937906df65329ba98013dc9692fe22a4c5e4a648112de500508b18a21b41e3"},
+]
+
+[[package]]
+name = "langchain"
+version = "0.0.261"
+description = "Building applications with LLMs through composability"
+optional = false
+python-versions = ">=3.8.1,<4.0"
+files = [
+ {file = "langchain-0.0.261-py3-none-any.whl", hash = "sha256:d2aa7c48c62e9febd7440d06eb067066ce2623db6d8b367f2742c9a78c315ce8"},
+ {file = "langchain-0.0.261.tar.gz", hash = "sha256:1ec501b8323811bf8fa2db10b7703a654c57235646344eefdd0bae764c9e4335"},
+]
+
+[package.dependencies]
+aiohttp = ">=3.8.3,<4.0.0"
+async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""}
+dataclasses-json = ">=0.5.7,<0.6.0"
+langsmith = ">=0.0.11,<0.1.0"
+numexpr = ">=2.8.4,<3.0.0"
+numpy = ">=1,<2"
+openapi-schema-pydantic = ">=1.2,<2.0"
+pydantic = ">=1,<2"
+PyYAML = ">=5.3"
+requests = ">=2,<3"
+SQLAlchemy = ">=1.4,<3"
+tenacity = ">=8.1.0,<9.0.0"
+
+[package.extras]
+all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "amadeus (>=8.1.0)", "anthropic (>=0.3,<0.4)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.9,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clarifai (>=9.1.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=4,<5)", "deeplake (>=3.6.8,<4.0.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=3.8.3,<4.0.0)", "elasticsearch (>=8,<9)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.6,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "libdeeplake (>=0.0.60,<0.0.61)", "librosa (>=0.10.0.post2,<0.11.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "marqo (>=0.11.0,<0.12.0)", "momento (>=1.5.0,<2.0.0)", "nebula3-python (>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "octoai-sdk (>=0.1.1,<0.2.0)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "python-arango (>=7.5.9,<8.0.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.3.1,<2.0.0)", "rdflib (>=6.3.2,<7.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "singlestoredb (>=0.7.1,<0.8.0)", "spacy (>=3,<4)", "steamship (>=2.16.9,<3.0.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)", "xinference (>=0.0.6,<0.0.7)"]
+azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b6)", "openai (>=0,<1)"]
+clarifai = ["clarifai (>=9.1.0)"]
+cohere = ["cohere (>=4,<5)"]
+docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"]
+embeddings = ["sentence-transformers (>=2,<3)"]
+extended-testing = ["amazon-textract-caller (<2)", "anthropic (>=0.3,<0.4)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.0.7,<0.0.8)", "chardet (>=5.1.0,<6.0.0)", "esprima (>=4.0.1,<5.0.0)", "feedparser (>=6.0.10,<7.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "openai (>=0,<1)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tqdm (>=4.48.0)", "xata (>=1.0.0a7,<2.0.0)", "xinference (>=0.0.6,<0.0.7)", "xmltodict (>=0.13.0,<0.14.0)", "zep-python (>=0.32)"]
+javascript = ["esprima (>=4.0.1,<5.0.0)"]
+llms = ["anthropic (>=0.3,<0.4)", "clarifai (>=9.1.0)", "cohere (>=4,<5)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "openllm (>=0.1.19)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)", "xinference (>=0.0.6,<0.0.7)"]
+openai = ["openai (>=0,<1)", "tiktoken (>=0.3.2,<0.4.0)"]
+qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"]
+scheduled-testing = ["openai (>=0,<1)"]
+text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
+
+[[package]]
+name = "langsmith"
+version = "0.0.21"
+description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
+optional = false
+python-versions = ">=3.8.1,<4.0"
+files = [
+ {file = "langsmith-0.0.21-py3-none-any.whl", hash = "sha256:a04c6eb3b4fc6205b15a559705f726fd0114ee2b3bd8668a0bd11cf29d5c5992"},
+ {file = "langsmith-0.0.21.tar.gz", hash = "sha256:ec90ddab6beee6c344cf0ed8ae7d68948740cf98e119dd97c571f3190555644e"},
+]
+
+[package.dependencies]
+pydantic = ">=1,<2"
+requests = ">=2,<3"
+
+[[package]]
+name = "lark-parser"
+version = "0.12.0"
+description = "a modern parsing library"
+optional = false
+python-versions = "*"
+files = [
+ {file = "lark-parser-0.12.0.tar.gz", hash = "sha256:15967db1f1214013dca65b1180745047b9be457d73da224fcda3d9dd4e96a138"},
+ {file = "lark_parser-0.12.0-py2.py3-none-any.whl", hash = "sha256:0eaf30cb5ba787fe404d73a7d6e61df97b21d5a63ac26c5008c78a494373c675"},
+]
+
+[package.extras]
+atomic-cache = ["atomicwrites"]
+nearley = ["js2py"]
+regex = ["regex"]
+
+[[package]]
+name = "lxml"
+version = "4.9.3"
+description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*"
+files = [
+ {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"},
+ {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"},
+ {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"},
+ {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"},
+ {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"},
+ {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"},
+ {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"},
+ {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"},
+ {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"},
+ {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"},
+ {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"},
+ {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"},
+ {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"},
+ {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"},
+ {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"},
+ {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"},
+ {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"},
+ {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"},
+ {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"},
+ {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"},
+ {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"},
+ {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"},
+ {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"},
+ {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"},
+ {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"},
+ {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"},
+ {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"},
+ {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"},
+ {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"},
+ {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"},
+ {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"},
+ {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"},
+ {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"},
+ {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"},
+ {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"},
+ {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"},
+ {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"},
+ {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"},
+ {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"},
+ {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"},
+ {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"},
+ {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"},
+ {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"},
+ {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"},
+ {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"},
+ {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"},
+ {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"},
+ {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"},
+ {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"},
+ {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"},
+ {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"},
+ {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"},
+ {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"},
+ {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"},
+ {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"},
+ {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"},
+ {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"},
+ {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"},
+ {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"},
+ {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"},
+ {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"},
+ {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"},
+ {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"},
+ {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"},
+ {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"},
+ {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"},
+ {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"},
+ {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"},
+ {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"},
+ {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"},
+ {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"},
+ {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"},
+ {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"},
+ {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"},
+ {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"},
+ {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"},
+ {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"},
+ {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"},
+ {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"},
+ {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"},
+ {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"},
+ {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"},
+ {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"},
+ {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"},
+ {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"},
+ {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"},
+ {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"},
+ {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"},
+ {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"},
+ {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"},
+ {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"},
+ {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"},
+]
+
+[package.extras]
+cssselect = ["cssselect (>=0.7)"]
+html5 = ["html5lib"]
+htmlsoup = ["BeautifulSoup4"]
+source = ["Cython (>=0.29.35)"]
+
+[[package]]
+name = "lz4"
+version = "4.3.2"
+description = "LZ4 Bindings for Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "lz4-4.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1c4c100d99eed7c08d4e8852dd11e7d1ec47a3340f49e3a96f8dfbba17ffb300"},
+ {file = "lz4-4.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:edd8987d8415b5dad25e797043936d91535017237f72fa456601be1479386c92"},
+ {file = "lz4-4.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7c50542b4ddceb74ab4f8b3435327a0861f06257ca501d59067a6a482535a77"},
+ {file = "lz4-4.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f5614d8229b33d4a97cb527db2a1ac81308c6e796e7bdb5d1309127289f69d5"},
+ {file = "lz4-4.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f00a9ba98f6364cadda366ae6469b7b3568c0cced27e16a47ddf6b774169270"},
+ {file = "lz4-4.3.2-cp310-cp310-win32.whl", hash = "sha256:b10b77dc2e6b1daa2f11e241141ab8285c42b4ed13a8642495620416279cc5b2"},
+ {file = "lz4-4.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:86480f14a188c37cb1416cdabacfb4e42f7a5eab20a737dac9c4b1c227f3b822"},
+ {file = "lz4-4.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7c2df117def1589fba1327dceee51c5c2176a2b5a7040b45e84185ce0c08b6a3"},
+ {file = "lz4-4.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1f25eb322eeb24068bb7647cae2b0732b71e5c639e4e4026db57618dcd8279f0"},
+ {file = "lz4-4.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8df16c9a2377bdc01e01e6de5a6e4bbc66ddf007a6b045688e285d7d9d61d1c9"},
+ {file = "lz4-4.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f571eab7fec554d3b1db0d666bdc2ad85c81f4b8cb08906c4c59a8cad75e6e22"},
+ {file = "lz4-4.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7211dc8f636ca625abc3d4fb9ab74e5444b92df4f8d58ec83c8868a2b0ff643d"},
+ {file = "lz4-4.3.2-cp311-cp311-win32.whl", hash = "sha256:867664d9ca9bdfce840ac96d46cd8838c9ae891e859eb98ce82fcdf0e103a947"},
+ {file = "lz4-4.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:a6a46889325fd60b8a6b62ffc61588ec500a1883db32cddee9903edfba0b7584"},
+ {file = "lz4-4.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a85b430138882f82f354135b98c320dafb96fc8fe4656573d95ab05de9eb092"},
+ {file = "lz4-4.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65d5c93f8badacfa0456b660285e394e65023ef8071142e0dcbd4762166e1be0"},
+ {file = "lz4-4.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b50f096a6a25f3b2edca05aa626ce39979d63c3b160687c8c6d50ac3943d0ba"},
+ {file = "lz4-4.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:200d05777d61ba1ff8d29cb51c534a162ea0b4fe6d3c28be3571a0a48ff36080"},
+ {file = "lz4-4.3.2-cp37-cp37m-win32.whl", hash = "sha256:edc2fb3463d5d9338ccf13eb512aab61937be50aa70734bcf873f2f493801d3b"},
+ {file = "lz4-4.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:83acfacab3a1a7ab9694333bcb7950fbeb0be21660d236fd09c8337a50817897"},
+ {file = "lz4-4.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7a9eec24ec7d8c99aab54de91b4a5a149559ed5b3097cf30249b665689b3d402"},
+ {file = "lz4-4.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:31d72731c4ac6ebdce57cd9a5cabe0aecba229c4f31ba3e2c64ae52eee3fdb1c"},
+ {file = "lz4-4.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83903fe6db92db0be101acedc677aa41a490b561567fe1b3fe68695b2110326c"},
+ {file = "lz4-4.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:926b26db87ec8822cf1870efc3d04d06062730ec3279bbbd33ba47a6c0a5c673"},
+ {file = "lz4-4.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e05afefc4529e97c08e65ef92432e5f5225c0bb21ad89dee1e06a882f91d7f5e"},
+ {file = "lz4-4.3.2-cp38-cp38-win32.whl", hash = "sha256:ad38dc6a7eea6f6b8b642aaa0683253288b0460b70cab3216838747163fb774d"},
+ {file = "lz4-4.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:7e2dc1bd88b60fa09b9b37f08553f45dc2b770c52a5996ea52b2b40f25445676"},
+ {file = "lz4-4.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:edda4fb109439b7f3f58ed6bede59694bc631c4b69c041112b1b7dc727fffb23"},
+ {file = "lz4-4.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ca83a623c449295bafad745dcd399cea4c55b16b13ed8cfea30963b004016c9"},
+ {file = "lz4-4.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5ea0e788dc7e2311989b78cae7accf75a580827b4d96bbaf06c7e5a03989bd5"},
+ {file = "lz4-4.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a98b61e504fb69f99117b188e60b71e3c94469295571492a6468c1acd63c37ba"},
+ {file = "lz4-4.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4931ab28a0d1c133104613e74eec1b8bb1f52403faabe4f47f93008785c0b929"},
+ {file = "lz4-4.3.2-cp39-cp39-win32.whl", hash = "sha256:ec6755cacf83f0c5588d28abb40a1ac1643f2ff2115481089264c7630236618a"},
+ {file = "lz4-4.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:4caedeb19e3ede6c7a178968b800f910db6503cb4cb1e9cc9221157572139b49"},
+ {file = "lz4-4.3.2.tar.gz", hash = "sha256:e1431d84a9cfb23e6773e72078ce8e65cad6745816d4cbf9ae67da5ea419acda"},
+]
+
+[package.extras]
+docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"]
+flake8 = ["flake8"]
+tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"]
+
+[[package]]
+name = "markdown"
+version = "3.4.3"
+description = "Python implementation of John Gruber's Markdown."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "Markdown-3.4.3-py3-none-any.whl", hash = "sha256:065fd4df22da73a625f14890dd77eb8040edcbd68794bcd35943be14490608b2"},
+ {file = "Markdown-3.4.3.tar.gz", hash = "sha256:8bf101198e004dc93e84a12a7395e31aac6a9c9942848ae1d99b9d72cf9b3520"},
+]
+
+[package.extras]
+testing = ["coverage", "pyyaml"]
+
+[[package]]
+name = "markupsafe"
+version = "2.1.3"
+description = "Safely add untrusted strings to HTML/XML markup."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"},
+ {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"},
+ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"},
+]
+
+[[package]]
+name = "marshmallow"
+version = "3.19.0"
+description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "marshmallow-3.19.0-py3-none-any.whl", hash = "sha256:93f0958568da045b0021ec6aeb7ac37c81bfcccbb9a0e7ed8559885070b3a19b"},
+ {file = "marshmallow-3.19.0.tar.gz", hash = "sha256:90032c0fd650ce94b6ec6dc8dfeb0e3ff50c144586462c389b81a07205bedb78"},
+]
+
+[package.dependencies]
+packaging = ">=17.0"
+
+[package.extras]
+dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"]
+docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.3.0)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"]
+lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)"]
+tests = ["pytest", "pytz", "simplejson"]
+
+[[package]]
+name = "marshmallow-enum"
+version = "1.5.1"
+description = "Enum field for Marshmallow"
+optional = false
+python-versions = "*"
+files = [
+ {file = "marshmallow-enum-1.5.1.tar.gz", hash = "sha256:38e697e11f45a8e64b4a1e664000897c659b60aa57bfa18d44e226a9920b6e58"},
+ {file = "marshmallow_enum-1.5.1-py2.py3-none-any.whl", hash = "sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072"},
+]
+
+[package.dependencies]
+marshmallow = ">=2.0.0"
+
+[[package]]
+name = "monotonic"
+version = "1.6"
+description = "An implementation of time.monotonic() for Python 2 & < 3.3"
+optional = false
+python-versions = "*"
+files = [
+ {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"},
+ {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"},
+]
+
+[[package]]
+name = "mpmath"
+version = "1.3.0"
+description = "Python library for arbitrary-precision floating-point arithmetic"
+optional = false
+python-versions = "*"
+files = [
+ {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"},
+ {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"},
+]
+
+[package.extras]
+develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"]
+docs = ["sphinx"]
+gmpy = ["gmpy2 (>=2.1.0a4)"]
+tests = ["pytest (>=4.6)"]
+
+[[package]]
+name = "msg-parser"
+version = "1.2.0"
+description = "This module enables reading, parsing and converting Microsoft Outlook MSG E-Mail files."
+optional = false
+python-versions = ">=3.4"
+files = [
+ {file = "msg_parser-1.2.0-py2.py3-none-any.whl", hash = "sha256:d47a2f0b2a359cb189fad83cc991b63ea781ecc70d91410324273fbf93e95375"},
+ {file = "msg_parser-1.2.0.tar.gz", hash = "sha256:0de858d4fcebb6c8f6f028da83a17a20fe01cdce67c490779cf43b3b0162aa66"},
+]
+
+[package.dependencies]
+olefile = ">=0.46"
+
+[package.extras]
+rtf = ["compressed-rtf (>=1.0.5)"]
+
+[[package]]
+name = "msoffcrypto-tool"
+version = "5.0.1"
+description = "Python tool and library for decrypting MS Office files with passwords or other keys"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+ {file = "msoffcrypto_tool-5.0.1-py3-none-any.whl", hash = "sha256:2b489c8a2b13bec07b94c8f5ce9054111dec3223ff8bedfd486cae3c299be54b"},
+ {file = "msoffcrypto_tool-5.0.1.tar.gz", hash = "sha256:9efd0ef5cc3e086e2d175e7a5d7b2b8cb59836c896b8a486d362bbca166db645"},
+]
+
+[package.dependencies]
+cryptography = ">=35.0"
+olefile = ">=0.46"
+
+[[package]]
+name = "multidict"
+version = "6.0.4"
+description = "multidict implementation"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"},
+ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"},
+ {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"},
+ {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"},
+ {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"},
+ {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"},
+ {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"},
+ {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"},
+ {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"},
+ {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"},
+ {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"},
+ {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"},
+ {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"},
+ {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"},
+ {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"},
+]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
+[[package]]
+name = "networkx"
+version = "3.1"
+description = "Python package for creating and manipulating graphs and networks"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "networkx-3.1-py3-none-any.whl", hash = "sha256:4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36"},
+ {file = "networkx-3.1.tar.gz", hash = "sha256:de346335408f84de0eada6ff9fafafff9bcda11f0a0dfaa931133debb146ab61"},
+]
+
+[package.extras]
+default = ["matplotlib (>=3.4)", "numpy (>=1.20)", "pandas (>=1.3)", "scipy (>=1.8)"]
+developer = ["mypy (>=1.1)", "pre-commit (>=3.2)"]
+doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.13)", "sphinx (>=6.1)", "sphinx-gallery (>=0.12)", "texext (>=0.6.7)"]
+extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"]
+test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"]
+
+[[package]]
+name = "nltk"
+version = "3.8.1"
+description = "Natural Language Toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"},
+ {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"},
+]
+
+[package.dependencies]
+click = "*"
+joblib = "*"
+regex = ">=2021.8.3"
+tqdm = "*"
+
+[package.extras]
+all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"]
+corenlp = ["requests"]
+machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"]
+plot = ["matplotlib"]
+tgrep = ["pyparsing"]
+twitter = ["twython"]
+
+[[package]]
+name = "numexpr"
+version = "2.8.4"
+description = "Fast numerical expression evaluator for NumPy"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "numexpr-2.8.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a75967d46b6bd56455dd32da6285e5ffabe155d0ee61eef685bbfb8dafb2e484"},
+ {file = "numexpr-2.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db93cf1842f068247de631bfc8af20118bf1f9447cd929b531595a5e0efc9346"},
+ {file = "numexpr-2.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bca95f4473b444428061d4cda8e59ac564dc7dc6a1dea3015af9805c6bc2946"},
+ {file = "numexpr-2.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e34931089a6bafc77aaae21f37ad6594b98aa1085bb8b45d5b3cd038c3c17d9"},
+ {file = "numexpr-2.8.4-cp310-cp310-win32.whl", hash = "sha256:f3a920bfac2645017110b87ddbe364c9c7a742870a4d2f6120b8786c25dc6db3"},
+ {file = "numexpr-2.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:6931b1e9d4f629f43c14b21d44f3f77997298bea43790cfcdb4dd98804f90783"},
+ {file = "numexpr-2.8.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9400781553541f414f82eac056f2b4c965373650df9694286b9bd7e8d413f8d8"},
+ {file = "numexpr-2.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ee9db7598dd4001138b482342b96d78110dd77cefc051ec75af3295604dde6a"},
+ {file = "numexpr-2.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff5835e8af9a212e8480003d731aad1727aaea909926fd009e8ae6a1cba7f141"},
+ {file = "numexpr-2.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:655d84eb09adfee3c09ecf4a89a512225da153fdb7de13c447404b7d0523a9a7"},
+ {file = "numexpr-2.8.4-cp311-cp311-win32.whl", hash = "sha256:5538b30199bfc68886d2be18fcef3abd11d9271767a7a69ff3688defe782800a"},
+ {file = "numexpr-2.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:3f039321d1c17962c33079987b675fb251b273dbec0f51aac0934e932446ccc3"},
+ {file = "numexpr-2.8.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c867cc36cf815a3ec9122029874e00d8fbcef65035c4a5901e9b120dd5d626a2"},
+ {file = "numexpr-2.8.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:059546e8f6283ccdb47c683101a890844f667fa6d56258d48ae2ecf1b3875957"},
+ {file = "numexpr-2.8.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:845a6aa0ed3e2a53239b89c1ebfa8cf052d3cc6e053c72805e8153300078c0b1"},
+ {file = "numexpr-2.8.4-cp37-cp37m-win32.whl", hash = "sha256:a38664e699526cb1687aefd9069e2b5b9387da7feac4545de446141f1ef86f46"},
+ {file = "numexpr-2.8.4-cp37-cp37m-win_amd64.whl", hash = "sha256:eaec59e9bf70ff05615c34a8b8d6c7bd042bd9f55465d7b495ea5436f45319d0"},
+ {file = "numexpr-2.8.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b318541bf3d8326682ebada087ba0050549a16d8b3fa260dd2585d73a83d20a7"},
+ {file = "numexpr-2.8.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b076db98ca65eeaf9bd224576e3ac84c05e451c0bd85b13664b7e5f7b62e2c70"},
+ {file = "numexpr-2.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90f12cc851240f7911a47c91aaf223dba753e98e46dff3017282e633602e76a7"},
+ {file = "numexpr-2.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c368aa35ae9b18840e78b05f929d3a7b3abccdba9630a878c7db74ca2368339"},
+ {file = "numexpr-2.8.4-cp38-cp38-win32.whl", hash = "sha256:b96334fc1748e9ec4f93d5fadb1044089d73fb08208fdb8382ed77c893f0be01"},
+ {file = "numexpr-2.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:a6d2d7740ae83ba5f3531e83afc4b626daa71df1ef903970947903345c37bd03"},
+ {file = "numexpr-2.8.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:77898fdf3da6bb96aa8a4759a8231d763a75d848b2f2e5c5279dad0b243c8dfe"},
+ {file = "numexpr-2.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df35324666b693f13a016bc7957de7cc4d8801b746b81060b671bf78a52b9037"},
+ {file = "numexpr-2.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ac9cfe6d0078c5fc06ba1c1bbd20b8783f28c6f475bbabd3cad53683075cab"},
+ {file = "numexpr-2.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df3a1f6b24214a1ab826e9c1c99edf1686c8e307547a9aef33910d586f626d01"},
+ {file = "numexpr-2.8.4-cp39-cp39-win32.whl", hash = "sha256:7d71add384adc9119568d7e9ffa8a35b195decae81e0abf54a2b7779852f0637"},
+ {file = "numexpr-2.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:9f096d707290a6a00b6ffdaf581ee37331109fb7b6c8744e9ded7c779a48e517"},
+ {file = "numexpr-2.8.4.tar.gz", hash = "sha256:d5432537418d18691b9115d615d6daa17ee8275baef3edf1afbbf8bc69806147"},
+]
+
+[package.dependencies]
+numpy = ">=1.13.3"
+
+[[package]]
+name = "numpy"
+version = "1.25.1"
+description = "Fundamental package for array computing in Python"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "numpy-1.25.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77d339465dff3eb33c701430bcb9c325b60354698340229e1dff97745e6b3efa"},
+ {file = "numpy-1.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d736b75c3f2cb96843a5c7f8d8ccc414768d34b0a75f466c05f3a739b406f10b"},
+ {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a90725800caeaa160732d6b31f3f843ebd45d6b5f3eec9e8cc287e30f2805bf"},
+ {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c6c9261d21e617c6dc5eacba35cb68ec36bb72adcff0dee63f8fbc899362588"},
+ {file = "numpy-1.25.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0def91f8af6ec4bb94c370e38c575855bf1d0be8a8fbfba42ef9c073faf2cf19"},
+ {file = "numpy-1.25.1-cp310-cp310-win32.whl", hash = "sha256:fd67b306320dcadea700a8f79b9e671e607f8696e98ec255915c0c6d6b818503"},
+ {file = "numpy-1.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:c1516db588987450b85595586605742879e50dcce923e8973f79529651545b57"},
+ {file = "numpy-1.25.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b82655dd8efeea69dbf85d00fca40013d7f503212bc5259056244961268b66e"},
+ {file = "numpy-1.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e8f6049c4878cb16960fbbfb22105e49d13d752d4d8371b55110941fb3b17800"},
+ {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41a56b70e8139884eccb2f733c2f7378af06c82304959e174f8e7370af112e09"},
+ {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5154b1a25ec796b1aee12ac1b22f414f94752c5f94832f14d8d6c9ac40bcca6"},
+ {file = "numpy-1.25.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38eb6548bb91c421261b4805dc44def9ca1a6eef6444ce35ad1669c0f1a3fc5d"},
+ {file = "numpy-1.25.1-cp311-cp311-win32.whl", hash = "sha256:791f409064d0a69dd20579345d852c59822c6aa087f23b07b1b4e28ff5880fcb"},
+ {file = "numpy-1.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:c40571fe966393b212689aa17e32ed905924120737194b5d5c1b20b9ed0fb171"},
+ {file = "numpy-1.25.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3d7abcdd85aea3e6cdddb59af2350c7ab1ed764397f8eec97a038ad244d2d105"},
+ {file = "numpy-1.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a180429394f81c7933634ae49b37b472d343cccb5bb0c4a575ac8bbc433722f"},
+ {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d412c1697c3853c6fc3cb9751b4915859c7afe6a277c2bf00acf287d56c4e625"},
+ {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e1266411120a4f16fad8efa8e0454d21d00b8c7cee5b5ccad7565d95eb42dd"},
+ {file = "numpy-1.25.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f76aebc3358ade9eacf9bc2bb8ae589863a4f911611694103af05346637df1b7"},
+ {file = "numpy-1.25.1-cp39-cp39-win32.whl", hash = "sha256:247d3ffdd7775bdf191f848be8d49100495114c82c2bd134e8d5d075fb386a1c"},
+ {file = "numpy-1.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:1d5d3c68e443c90b38fdf8ef40e60e2538a27548b39b12b73132456847f4b631"},
+ {file = "numpy-1.25.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:35a9527c977b924042170a0887de727cd84ff179e478481404c5dc66b4170009"},
+ {file = "numpy-1.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d3fe3dd0506a28493d82dc3cf254be8cd0d26f4008a417385cbf1ae95b54004"},
+ {file = "numpy-1.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:012097b5b0d00a11070e8f2e261128c44157a8689f7dedcf35576e525893f4fe"},
+ {file = "numpy-1.25.1.tar.gz", hash = "sha256:9a3a9f3a61480cc086117b426a8bd86869c213fc4072e606f01c4e4b66eb92bf"},
+]
+
+[[package]]
+name = "olefile"
+version = "0.46"
+description = "Python package to parse, read and write Microsoft OLE2 files (Structured Storage or Compound Document, Microsoft Office)"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "olefile-0.46.zip", hash = "sha256:133b031eaf8fd2c9399b78b8bc5b8fcbe4c31e85295749bb17a87cba8f3c3964"},
+]
+
+[[package]]
+name = "oletools"
+version = "0.60.1"
+description = "Python tools to analyze security characteristics of MS Office and OLE files (also called Structured Storage, Compound File Binary Format or Compound Document File Format), for Malware Analysis and Incident Response #DFIR"
+optional = false
+python-versions = "*"
+files = [
+ {file = "oletools-0.60.1-py2.py3-none-any.whl", hash = "sha256:edef92374e688989a39269eb9a11142fb20a023629c23538c849c14d1d1144ea"},
+ {file = "oletools-0.60.1.zip", hash = "sha256:67a796da4c4b8e2feb9a6b2495bef8798a3323a75512de4e5669d9dc9d1fae31"},
+]
+
+[package.dependencies]
+colorclass = "*"
+easygui = "*"
+msoffcrypto-tool = {version = "*", markers = "platform_python_implementation != \"PyPy\" or python_version >= \"3\" and (platform_system != \"Windows\" and platform_system != \"Darwin\")"}
+olefile = ">=0.46"
+pcodedmp = ">=1.2.5"
+pyparsing = ">=2.1.0,<3"
+
+[package.extras]
+full = ["XLMMacroDeobfuscator"]
+
+[[package]]
+name = "onnxruntime"
+version = "1.15.1"
+description = "ONNX Runtime is a runtime accelerator for Machine Learning models"
+optional = false
+python-versions = "*"
+files = [
+ {file = "onnxruntime-1.15.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:baad59e6a763237fa39545325d29c16f98b8a45d2dfc524c67631e2e3ba44d16"},
+ {file = "onnxruntime-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:568c2db848f619a0a93e843c028e9fb4879929d40b04bd60f9ba6eb8d2e93421"},
+ {file = "onnxruntime-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69088d7784bb04dedfd9e883e2c96e4adf8ae0451acdd0abb78d68f59ecc6d9d"},
+ {file = "onnxruntime-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cef43737b2cd886d5d718d100f56ec78c9c476c5db5f8f946e95024978fe754"},
+ {file = "onnxruntime-1.15.1-cp310-cp310-win32.whl", hash = "sha256:79d7e65abb44a47c633ede8e53fe7b9756c272efaf169758c482c983cca98d7e"},
+ {file = "onnxruntime-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:8bc4c47682933a7a2c79808688aad5f12581305e182be552de50783b5438e6bd"},
+ {file = "onnxruntime-1.15.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:652b2cb777f76446e3cc41072dd3d1585a6388aeff92b9de656724bc22e241e4"},
+ {file = "onnxruntime-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89b86dbed15740abc385055a29c9673a212600248d702737ce856515bdeddc88"},
+ {file = "onnxruntime-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed5cdd9ee748149a57f4cdfa67187a0d68f75240645a3c688299dcd08742cc98"},
+ {file = "onnxruntime-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f748cce6a70ed38c19658615c55f4eedb9192765a4e9c4bd2682adfe980698d"},
+ {file = "onnxruntime-1.15.1-cp311-cp311-win32.whl", hash = "sha256:e0312046e814c40066e7823da58075992d51364cbe739eeeb2345ec440c3ac59"},
+ {file = "onnxruntime-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:f0980969689cb956c22bd1318b271e1be260060b37f3ddd82c7d63bd7f2d9a79"},
+ {file = "onnxruntime-1.15.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:345986cfdbd6f4b20a89b6a6cd9abd3e2ced2926ae0b6e91fefa8149f95c0f09"},
+ {file = "onnxruntime-1.15.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d7b3ad75e040f1e95757f69826a11051737b31584938a26d466a0234c6de98"},
+ {file = "onnxruntime-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3603d07b829bcc1c14963a76103e257aade8861eb208173b300cc26e118ec2f8"},
+ {file = "onnxruntime-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3df0625b9295daf1f7409ea55f72e1eeb38d54f5769add53372e79ddc3cf98d"},
+ {file = "onnxruntime-1.15.1-cp38-cp38-win32.whl", hash = "sha256:f68b47fdf1a0406c0292f81ac993e2a2ae3e8b166b436d590eb221f64e8e187a"},
+ {file = "onnxruntime-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:52d762d297cc3f731f54fa65a3e329b813164970671547bef6414d0ed52765c9"},
+ {file = "onnxruntime-1.15.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:99228f9f03dc1fc8af89a28c9f942e8bd3e97e894e263abe1a32e4ddb1f6363b"},
+ {file = "onnxruntime-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:45db7f96febb0cf23e3af147f35c4f8de1a37dd252d1cef853c242c2780250cd"},
+ {file = "onnxruntime-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bafc112a36db25c821b90ab747644041cb4218f6575889775a2c12dd958b8c3"},
+ {file = "onnxruntime-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:985693d18f2d46aa34fd44d7f65ff620660b2c8fa4b8ec365c2ca353f0fbdb27"},
+ {file = "onnxruntime-1.15.1-cp39-cp39-win32.whl", hash = "sha256:708eb31b0c04724bf0f01c1309a9e69bbc09b85beb750e5662c8aed29f1ff9fd"},
+ {file = "onnxruntime-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:73d6de4c42dfde1e9dbea04773e6dc23346c8cda9c7e08c6554fafc97ac60138"},
+]
+
+[package.dependencies]
+coloredlogs = "*"
+flatbuffers = "*"
+numpy = ">=1.21.6"
+packaging = "*"
+protobuf = "*"
+sympy = "*"
+
+[[package]]
+name = "openapi-schema-pydantic"
+version = "1.2.4"
+description = "OpenAPI (v3) specification schema as pydantic class"
+optional = false
+python-versions = ">=3.6.1"
+files = [
+ {file = "openapi-schema-pydantic-1.2.4.tar.gz", hash = "sha256:3e22cf58b74a69f752cc7e5f1537f6e44164282db2700cbbcd3bb99ddd065196"},
+ {file = "openapi_schema_pydantic-1.2.4-py3-none-any.whl", hash = "sha256:a932ecc5dcbb308950282088956e94dea069c9823c84e507d64f6b622222098c"},
+]
+
+[package.dependencies]
+pydantic = ">=1.8.2"
+
+[[package]]
+name = "openpyxl"
+version = "3.1.2"
+description = "A Python library to read/write Excel 2010 xlsx/xlsm files"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"},
+ {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"},
+]
+
+[package.dependencies]
+et-xmlfile = "*"
+
+[[package]]
+name = "overrides"
+version = "7.3.1"
+description = "A decorator to automatically detect mismatch when overriding a method."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "overrides-7.3.1-py3-none-any.whl", hash = "sha256:6187d8710a935d09b0bcef8238301d6ee2569d2ac1ae0ec39a8c7924e27f58ca"},
+ {file = "overrides-7.3.1.tar.gz", hash = "sha256:8b97c6c1e1681b78cbc9424b138d880f0803c2254c5ebaabdde57bb6c62093f2"},
+]
+
+[[package]]
+name = "packaging"
+version = "23.1"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"},
+ {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"},
+]
+
+[[package]]
+name = "pandas"
+version = "2.0.3"
+description = "Powerful data structures for data analysis, time series, and statistics"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"},
+ {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"},
+ {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"},
+ {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"},
+ {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"},
+ {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"},
+ {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"},
+ {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"},
+ {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"},
+ {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"},
+ {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"},
+ {file = "pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"},
+ {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"},
+ {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"},
+ {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"},
+ {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"},
+ {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"},
+ {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"},
+ {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"},
+ {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"},
+ {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"},
+ {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"},
+ {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"},
+ {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"},
+ {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"},
+]
+
+[package.dependencies]
+numpy = [
+ {version = ">=1.21.0", markers = "python_version >= \"3.10\""},
+ {version = ">=1.23.2", markers = "python_version >= \"3.11\""},
+]
+python-dateutil = ">=2.8.2"
+pytz = ">=2020.1"
+tzdata = ">=2022.1"
+
+[package.extras]
+all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"]
+aws = ["s3fs (>=2021.08.0)"]
+clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"]
+compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"]
+computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"]
+excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"]
+feather = ["pyarrow (>=7.0.0)"]
+fss = ["fsspec (>=2021.07.0)"]
+gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"]
+hdf5 = ["tables (>=3.6.1)"]
+html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"]
+mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"]
+output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"]
+parquet = ["pyarrow (>=7.0.0)"]
+performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"]
+plot = ["matplotlib (>=3.6.1)"]
+postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"]
+spss = ["pyreadstat (>=1.1.2)"]
+sql-other = ["SQLAlchemy (>=1.4.16)"]
+test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"]
+xml = ["lxml (>=4.6.3)"]
+
+[[package]]
+name = "pandoc"
+version = "2.3"
+description = "Pandoc Documents for Python"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pandoc-2.3.tar.gz", hash = "sha256:e772c2c6d871146894579828dbaf1efd538eb64fc7e71d4a6b3a11a18baef90d"},
+]
+
+[package.dependencies]
+plumbum = "*"
+ply = "*"
+
+[[package]]
+name = "pcodedmp"
+version = "1.2.6"
+description = "A VBA p-code disassembler"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pcodedmp-1.2.6-py2.py3-none-any.whl", hash = "sha256:4441f7c0ab4cbda27bd4668db3b14f36261d86e5059ce06c0828602cbe1c4278"},
+ {file = "pcodedmp-1.2.6.tar.gz", hash = "sha256:025f8c809a126f45a082ffa820893e6a8d990d9d7ddb68694b5a9f0a6dbcd955"},
+]
+
+[package.dependencies]
+oletools = ">=0.54"
+win-unicode-console = {version = "*", markers = "platform_system == \"Windows\" and platform_python_implementation != \"PyPy\""}
+
+[[package]]
+name = "pdf2image"
+version = "1.16.3"
+description = "A wrapper around the pdftoppm and pdftocairo command line tools to convert PDF to a PIL Image list."
+optional = false
+python-versions = "*"
+files = [
+ {file = "pdf2image-1.16.3-py3-none-any.whl", hash = "sha256:b6154164af3677211c22cbb38b2bd778b43aca02758e962fe1e231f6d3b0e380"},
+ {file = "pdf2image-1.16.3.tar.gz", hash = "sha256:74208810c2cef4d9e347769b8e62a52303982ddb4f2dfd744c7ab4b940ae287e"},
+]
+
+[package.dependencies]
+pillow = "*"
+
+[[package]]
+name = "pdfminer-six"
+version = "20221105"
+description = "PDF parser and analyzer"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "pdfminer.six-20221105-py3-none-any.whl", hash = "sha256:1eaddd712d5b2732f8ac8486824533514f8ba12a0787b3d5fe1e686cd826532d"},
+ {file = "pdfminer.six-20221105.tar.gz", hash = "sha256:8448ab7b939d18b64820478ecac5394f482d7a79f5f7eaa7703c6c959c175e1d"},
+]
+
+[package.dependencies]
+charset-normalizer = ">=2.0.0"
+cryptography = ">=36.0.0"
+
+[package.extras]
+dev = ["black", "mypy (==0.931)", "nox", "pytest"]
+docs = ["sphinx", "sphinx-argparse"]
+image = ["Pillow"]
+
+[[package]]
+name = "pillow"
+version = "10.0.0"
+description = "Python Imaging Library (Fork)"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "Pillow-10.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1f62406a884ae75fb2f818694469519fb685cc7eaff05d3451a9ebe55c646891"},
+ {file = "Pillow-10.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d5db32e2a6ccbb3d34d87c87b432959e0db29755727afb37290e10f6e8e62614"},
+ {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf4392b77bdc81f36e92d3a07a5cd072f90253197f4a52a55a8cec48a12483b"},
+ {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:520f2a520dc040512699f20fa1c363eed506e94248d71f85412b625026f6142c"},
+ {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:8c11160913e3dd06c8ffdb5f233a4f254cb449f4dfc0f8f4549eda9e542c93d1"},
+ {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a74ba0c356aaa3bb8e3eb79606a87669e7ec6444be352870623025d75a14a2bf"},
+ {file = "Pillow-10.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d0dae4cfd56969d23d94dc8e89fb6a217be461c69090768227beb8ed28c0a3"},
+ {file = "Pillow-10.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22c10cc517668d44b211717fd9775799ccec4124b9a7f7b3635fc5386e584992"},
+ {file = "Pillow-10.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:dffe31a7f47b603318c609f378ebcd57f1554a3a6a8effbc59c3c69f804296de"},
+ {file = "Pillow-10.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:9fb218c8a12e51d7ead2a7c9e101a04982237d4855716af2e9499306728fb485"},
+ {file = "Pillow-10.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d35e3c8d9b1268cbf5d3670285feb3528f6680420eafe35cccc686b73c1e330f"},
+ {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ed64f9ca2f0a95411e88a4efbd7a29e5ce2cea36072c53dd9d26d9c76f753b3"},
+ {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6eb5502f45a60a3f411c63187db83a3d3107887ad0d036c13ce836f8a36f1d"},
+ {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:c1fbe7621c167ecaa38ad29643d77a9ce7311583761abf7836e1510c580bf3dd"},
+ {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cd25d2a9d2b36fcb318882481367956d2cf91329f6892fe5d385c346c0649629"},
+ {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3b08d4cc24f471b2c8ca24ec060abf4bebc6b144cb89cba638c720546b1cf538"},
+ {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737a602fbd82afd892ca746392401b634e278cb65d55c4b7a8f48e9ef8d008d"},
+ {file = "Pillow-10.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:3a82c40d706d9aa9734289740ce26460a11aeec2d9c79b7af87bb35f0073c12f"},
+ {file = "Pillow-10.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:bc2ec7c7b5d66b8ec9ce9f720dbb5fa4bace0f545acd34870eff4a369b44bf37"},
+ {file = "Pillow-10.0.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:d80cf684b541685fccdd84c485b31ce73fc5c9b5d7523bf1394ce134a60c6883"},
+ {file = "Pillow-10.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76de421f9c326da8f43d690110f0e79fe3ad1e54be811545d7d91898b4c8493e"},
+ {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81ff539a12457809666fef6624684c008e00ff6bf455b4b89fd00a140eecd640"},
+ {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce543ed15570eedbb85df19b0a1a7314a9c8141a36ce089c0a894adbfccb4568"},
+ {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:685ac03cc4ed5ebc15ad5c23bc555d68a87777586d970c2c3e216619a5476223"},
+ {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d72e2ecc68a942e8cf9739619b7f408cc7b272b279b56b2c83c6123fcfa5cdff"},
+ {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d50b6aec14bc737742ca96e85d6d0a5f9bfbded018264b3b70ff9d8c33485551"},
+ {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:00e65f5e822decd501e374b0650146063fbb30a7264b4d2744bdd7b913e0cab5"},
+ {file = "Pillow-10.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:f31f9fdbfecb042d046f9d91270a0ba28368a723302786c0009ee9b9f1f60199"},
+ {file = "Pillow-10.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:1ce91b6ec08d866b14413d3f0bbdea7e24dfdc8e59f562bb77bc3fe60b6144ca"},
+ {file = "Pillow-10.0.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:349930d6e9c685c089284b013478d6f76e3a534e36ddfa912cde493f235372f3"},
+ {file = "Pillow-10.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3a684105f7c32488f7153905a4e3015a3b6c7182e106fe3c37fbb5ef3e6994c3"},
+ {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4f69b3700201b80bb82c3a97d5e9254084f6dd5fb5b16fc1a7b974260f89f43"},
+ {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f07ea8d2f827d7d2a49ecf1639ec02d75ffd1b88dcc5b3a61bbb37a8759ad8d"},
+ {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:040586f7d37b34547153fa383f7f9aed68b738992380ac911447bb78f2abe530"},
+ {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f88a0b92277de8e3ca715a0d79d68dc82807457dae3ab8699c758f07c20b3c51"},
+ {file = "Pillow-10.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c7cf14a27b0d6adfaebb3ae4153f1e516df54e47e42dcc073d7b3d76111a8d86"},
+ {file = "Pillow-10.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3400aae60685b06bb96f99a21e1ada7bc7a413d5f49bce739828ecd9391bb8f7"},
+ {file = "Pillow-10.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:dbc02381779d412145331789b40cc7b11fdf449e5d94f6bc0b080db0a56ea3f0"},
+ {file = "Pillow-10.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:9211e7ad69d7c9401cfc0e23d49b69ca65ddd898976d660a2fa5904e3d7a9baa"},
+ {file = "Pillow-10.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:faaf07ea35355b01a35cb442dd950d8f1bb5b040a7787791a535de13db15ed90"},
+ {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f72a021fbb792ce98306ffb0c348b3c9cb967dce0f12a49aa4c3d3fdefa967"},
+ {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f7c16705f44e0504a3a2a14197c1f0b32a95731d251777dcb060aa83022cb2d"},
+ {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:76edb0a1fa2b4745fb0c99fb9fb98f8b180a1bbceb8be49b087e0b21867e77d3"},
+ {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:368ab3dfb5f49e312231b6f27b8820c823652b7cd29cfbd34090565a015e99ba"},
+ {file = "Pillow-10.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:608bfdee0d57cf297d32bcbb3c728dc1da0907519d1784962c5f0c68bb93e5a3"},
+ {file = "Pillow-10.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5c6e3df6bdd396749bafd45314871b3d0af81ff935b2d188385e970052091017"},
+ {file = "Pillow-10.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:7be600823e4c8631b74e4a0d38384c73f680e6105a7d3c6824fcf226c178c7e6"},
+ {file = "Pillow-10.0.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:92be919bbc9f7d09f7ae343c38f5bb21c973d2576c1d45600fce4b74bafa7ac0"},
+ {file = "Pillow-10.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8182b523b2289f7c415f589118228d30ac8c355baa2f3194ced084dac2dbba"},
+ {file = "Pillow-10.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:38250a349b6b390ee6047a62c086d3817ac69022c127f8a5dc058c31ccef17f3"},
+ {file = "Pillow-10.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:88af2003543cc40c80f6fca01411892ec52b11021b3dc22ec3bc9d5afd1c5334"},
+ {file = "Pillow-10.0.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:c189af0545965fa8d3b9613cfdb0cd37f9d71349e0f7750e1fd704648d475ed2"},
+ {file = "Pillow-10.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce7b031a6fc11365970e6a5686d7ba8c63e4c1cf1ea143811acbb524295eabed"},
+ {file = "Pillow-10.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:db24668940f82321e746773a4bc617bfac06ec831e5c88b643f91f122a785684"},
+ {file = "Pillow-10.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:efe8c0681042536e0d06c11f48cebe759707c9e9abf880ee213541c5b46c5bf3"},
+ {file = "Pillow-10.0.0.tar.gz", hash = "sha256:9c82b5b3e043c7af0d95792d0d20ccf68f61a1fec6b3530e718b688422727396"},
+]
+
+[package.extras]
+docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"]
+tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"]
+
+[[package]]
+name = "plumbum"
+version = "1.8.2"
+description = "Plumbum: shell combinators library"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "plumbum-1.8.2-py3-none-any.whl", hash = "sha256:3ad9e5f56c6ec98f6f7988f7ea8b52159662ea9e915868d369dbccbfca0e367e"},
+ {file = "plumbum-1.8.2.tar.gz", hash = "sha256:9e6dc032f4af952665f32f3206567bc23b7858b1413611afe603a3f8ad9bfd75"},
+]
+
+[package.dependencies]
+pywin32 = {version = "*", markers = "platform_system == \"Windows\" and platform_python_implementation != \"PyPy\""}
+
+[package.extras]
+dev = ["paramiko", "psutil", "pytest (>=6.0)", "pytest-cov", "pytest-mock", "pytest-timeout"]
+docs = ["sphinx (>=4.0.0)", "sphinx-rtd-theme (>=1.0.0)"]
+ssh = ["paramiko"]
+
+[[package]]
+name = "ply"
+version = "3.11"
+description = "Python Lex & Yacc"
+optional = false
+python-versions = "*"
+files = [
+ {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"},
+ {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"},
+]
+
+[[package]]
+name = "posthog"
+version = "3.0.1"
+description = "Integrate PostHog into any python application."
+optional = false
+python-versions = "*"
+files = [
+ {file = "posthog-3.0.1-py2.py3-none-any.whl", hash = "sha256:9c7f92fecc713257d4b2710d05b456569c9156fbdd3e85655ba7ba5ba6c7b3ae"},
+ {file = "posthog-3.0.1.tar.gz", hash = "sha256:57d2791ff5752ce56ba0f9bb8876faf3ca9208f1c2c6ceaeb5a2504c34493767"},
+]
+
+[package.dependencies]
+backoff = ">=1.10.0"
+monotonic = ">=1.5"
+python-dateutil = ">2.1"
+requests = ">=2.7,<3.0"
+six = ">=1.5"
+
+[package.extras]
+dev = ["black", "flake8", "flake8-print", "isort", "pre-commit"]
+sentry = ["django", "sentry-sdk"]
+test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint", "pytest"]
+
+[[package]]
+name = "protobuf"
+version = "4.23.4"
+description = ""
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "protobuf-4.23.4-cp310-abi3-win32.whl", hash = "sha256:5fea3c64d41ea5ecf5697b83e41d09b9589e6f20b677ab3c48e5f242d9b7897b"},
+ {file = "protobuf-4.23.4-cp310-abi3-win_amd64.whl", hash = "sha256:7b19b6266d92ca6a2a87effa88ecc4af73ebc5cfde194dc737cf8ef23a9a3b12"},
+ {file = "protobuf-4.23.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8547bf44fe8cec3c69e3042f5c4fb3e36eb2a7a013bb0a44c018fc1e427aafbd"},
+ {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fee88269a090ada09ca63551bf2f573eb2424035bcf2cb1b121895b01a46594a"},
+ {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:effeac51ab79332d44fba74660d40ae79985901ac21bca408f8dc335a81aa597"},
+ {file = "protobuf-4.23.4-cp37-cp37m-win32.whl", hash = "sha256:c3e0939433c40796ca4cfc0fac08af50b00eb66a40bbbc5dee711998fb0bbc1e"},
+ {file = "protobuf-4.23.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9053df6df8e5a76c84339ee4a9f5a2661ceee4a0dab019e8663c50ba324208b0"},
+ {file = "protobuf-4.23.4-cp38-cp38-win32.whl", hash = "sha256:e1c915778d8ced71e26fcf43c0866d7499891bca14c4368448a82edc61fdbc70"},
+ {file = "protobuf-4.23.4-cp38-cp38-win_amd64.whl", hash = "sha256:351cc90f7d10839c480aeb9b870a211e322bf05f6ab3f55fcb2f51331f80a7d2"},
+ {file = "protobuf-4.23.4-cp39-cp39-win32.whl", hash = "sha256:6dd9b9940e3f17077e820b75851126615ee38643c2c5332aa7a359988820c720"},
+ {file = "protobuf-4.23.4-cp39-cp39-win_amd64.whl", hash = "sha256:0a5759f5696895de8cc913f084e27fd4125e8fb0914bb729a17816a33819f474"},
+ {file = "protobuf-4.23.4-py3-none-any.whl", hash = "sha256:e9d0be5bf34b275b9f87ba7407796556abeeba635455d036c7351f7c183ef8ff"},
+ {file = "protobuf-4.23.4.tar.gz", hash = "sha256:ccd9430c0719dce806b93f89c91de7977304729e55377f872a92465d548329a9"},
+]
+
+[[package]]
+name = "pulsar-client"
+version = "3.2.0"
+description = "Apache Pulsar Python client library"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pulsar_client-3.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:da53bbe1903026ca1253d36a67bde0ae88513497091658aee8c5514c3e567483"},
+ {file = "pulsar_client-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec595a71b7a25f1a72a1350efd6680a511b53253c3cac1911ba3d6c4d71fa64c"},
+ {file = "pulsar_client-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3557c65463d74ec8d2864752389beb06761ab591dd134a164e0b1303c66719b"},
+ {file = "pulsar_client-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d51dc76fec48217489bde95754ad58288c9389361de42f5a27d64e19840d27fb"},
+ {file = "pulsar_client-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9ef2baf85311e0fe1b98342fdafbb93a1818a08ef999eaa524234fedf6f3b941"},
+ {file = "pulsar_client-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:0928b02beda0c98e77178f4e30e962ddb8ee8c3320e4c7304a78b0796e976523"},
+ {file = "pulsar_client-3.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:584f44b03474a69906be711a597a4d516263a55be31e49fc07be503dc8406821"},
+ {file = "pulsar_client-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a637b9a3b30860c61e68a7b8ea650e0987d89e82f73b6a3df1ab662a6438fdda"},
+ {file = "pulsar_client-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4a187fdc5febcf16f725179dcf2c476f31eeebd8353794d91754a3202dd5072"},
+ {file = "pulsar_client-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5ff879f868cf1fd29db99f39fdb22b3ec3e749c648aca28526689756d922d1c5"},
+ {file = "pulsar_client-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a5f85d0cc414f739a5b51d843f213b54b2cd768c3a34f7c27cca410712b1f81"},
+ {file = "pulsar_client-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:4fe748283848d829a80c0323558faeebea4c240d69fa58314ac90344f6999d17"},
+ {file = "pulsar_client-3.2.0-cp37-cp37m-macosx_10_15_universal2.whl", hash = "sha256:06b91c26def86dbbc35be15257999fd8a2afbadf32983916ea3eef44f4d4cab4"},
+ {file = "pulsar_client-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39ec897bc8d232e6b118793378fc662a844334b829a28a1b4ad1c5fe8d019135"},
+ {file = "pulsar_client-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa37c96c25c1b5aff3bad0fd0194b385ec190b2c67a2f439ac91577f81ae18d3"},
+ {file = "pulsar_client-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d49cdd4d1b7fc2e80d100acf14e6fd3898f6e099e403fc56ed22a690245b2fec"},
+ {file = "pulsar_client-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0058ca3191fd24528ccf94dba6f12e4093831454a2597166f96900d0717271bf"},
+ {file = "pulsar_client-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:cb69b0411008e0b56df51de0aab20aa1c1a12aef3019b9ceba89afbae1f07fe2"},
+ {file = "pulsar_client-3.2.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:f7d33e99602352df7a30707eab4e5781654602212fb618928bffb5523f2bcf35"},
+ {file = "pulsar_client-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad1ac15a175ca90555c681a4d0134568771c6346b97a172f3ef14006556a50ae"},
+ {file = "pulsar_client-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:369e08ef1d5cb196dd9271039928800f90b4701a9c9df90bc068b44260d2fb11"},
+ {file = "pulsar_client-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a52ba2b6736a2ebeed31b590e75d417dda149e333461655860efa84d898a3eb4"},
+ {file = "pulsar_client-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c801334b3b569b23976481a2922bcea0c6dd990fc26544658dd9e9c8f78ca36"},
+ {file = "pulsar_client-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:cd01fd419280e9013d1655bc53662248be2656b623b1506480e1a985aa7dadd2"},
+ {file = "pulsar_client-3.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:0abe54d84db76435a6cd88ce27610352cabc7efae9fa3e7f874e032ec2ca0b3f"},
+ {file = "pulsar_client-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9a1b6a806eb4819d8cbab1c4ae44ebf2110a94204a46c365f5757e1455252f2"},
+ {file = "pulsar_client-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34ea2a6b75ae0e303d522e5b57c75a4ff03dc18b9bfc14151fb14dfaf5866f17"},
+ {file = "pulsar_client-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:be6d3a9b2e1db3b6d1a7db5e13f7b4ed420674cf072cdb520fb004c4cd54c0af"},
+ {file = "pulsar_client-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6b733e6239ffb505f7084df0175baf9d0215f14d0a02e9bbd1fdf71a2d6ea17"},
+ {file = "pulsar_client-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:edc2135d02b4793efb086edca0ffaa6e8ac9133961c2cdc17ae487e0a53da481"},
+]
+
+[package.dependencies]
+certifi = "*"
+
+[package.extras]
+all = ["apache-bookkeeper-client (>=4.16.1)", "fastavro (==1.7.3)", "grpcio (>=1.8.2)", "prometheus-client", "protobuf (>=3.6.1,<=3.20.3)", "ratelimit"]
+avro = ["fastavro (==1.7.3)"]
+functions = ["apache-bookkeeper-client (>=4.16.1)", "grpcio (>=1.8.2)", "prometheus-client", "protobuf (>=3.6.1,<=3.20.3)", "ratelimit"]
+
+[[package]]
+name = "pycparser"
+version = "2.21"
+description = "C parser in Python"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
+ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
+]
+
+[[package]]
+name = "pydantic"
+version = "1.10.11"
+description = "Data validation and settings management using python type hints"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pydantic-1.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff44c5e89315b15ff1f7fdaf9853770b810936d6b01a7bcecaa227d2f8fe444f"},
+ {file = "pydantic-1.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c098d4ab5e2d5b3984d3cb2527e2d6099d3de85630c8934efcfdc348a9760e"},
+ {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16928fdc9cb273c6af00d9d5045434c39afba5f42325fb990add2c241402d151"},
+ {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0588788a9a85f3e5e9ebca14211a496409cb3deca5b6971ff37c556d581854e7"},
+ {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9baf78b31da2dc3d3f346ef18e58ec5f12f5aaa17ac517e2ffd026a92a87588"},
+ {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:373c0840f5c2b5b1ccadd9286782852b901055998136287828731868027a724f"},
+ {file = "pydantic-1.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:c3339a46bbe6013ef7bdd2844679bfe500347ac5742cd4019a88312aa58a9847"},
+ {file = "pydantic-1.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:08a6c32e1c3809fbc49debb96bf833164f3438b3696abf0fbeceb417d123e6eb"},
+ {file = "pydantic-1.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a451ccab49971af043ec4e0d207cbc8cbe53dbf148ef9f19599024076fe9c25b"},
+ {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b02d24f7b2b365fed586ed73582c20f353a4c50e4be9ba2c57ab96f8091ddae"},
+ {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f34739a89260dfa420aa3cbd069fbcc794b25bbe5c0a214f8fb29e363484b66"},
+ {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e297897eb4bebde985f72a46a7552a7556a3dd11e7f76acda0c1093e3dbcf216"},
+ {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d185819a7a059550ecb85d5134e7d40f2565f3dd94cfd870132c5f91a89cf58c"},
+ {file = "pydantic-1.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:4400015f15c9b464c9db2d5d951b6a780102cfa5870f2c036d37c23b56f7fc1b"},
+ {file = "pydantic-1.10.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2417de68290434461a266271fc57274a138510dca19982336639484c73a07af6"},
+ {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:331c031ba1554b974c98679bd0780d89670d6fd6f53f5d70b10bdc9addee1713"},
+ {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8268a735a14c308923e8958363e3a3404f6834bb98c11f5ab43251a4e410170c"},
+ {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:44e51ba599c3ef227e168424e220cd3e544288c57829520dc90ea9cb190c3248"},
+ {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d7781f1d13b19700b7949c5a639c764a077cbbdd4322ed505b449d3ca8edcb36"},
+ {file = "pydantic-1.10.11-cp37-cp37m-win_amd64.whl", hash = "sha256:7522a7666157aa22b812ce14c827574ddccc94f361237ca6ea8bb0d5c38f1629"},
+ {file = "pydantic-1.10.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc64eab9b19cd794a380179ac0e6752335e9555d214cfcb755820333c0784cb3"},
+ {file = "pydantic-1.10.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8dc77064471780262b6a68fe67e013298d130414d5aaf9b562c33987dbd2cf4f"},
+ {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe429898f2c9dd209bd0632a606bddc06f8bce081bbd03d1c775a45886e2c1cb"},
+ {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:192c608ad002a748e4a0bed2ddbcd98f9b56df50a7c24d9a931a8c5dd053bd3d"},
+ {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ef55392ec4bb5721f4ded1096241e4b7151ba6d50a50a80a2526c854f42e6a2f"},
+ {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e0bb6efe86281623abbeeb0be64eab740c865388ee934cd3e6a358784aca6e"},
+ {file = "pydantic-1.10.11-cp38-cp38-win_amd64.whl", hash = "sha256:265a60da42f9f27e0b1014eab8acd3e53bd0bad5c5b4884e98a55f8f596b2c19"},
+ {file = "pydantic-1.10.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:469adf96c8e2c2bbfa655fc7735a2a82f4c543d9fee97bd113a7fb509bf5e622"},
+ {file = "pydantic-1.10.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6cbfbd010b14c8a905a7b10f9fe090068d1744d46f9e0c021db28daeb8b6de1"},
+ {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abade85268cc92dff86d6effcd917893130f0ff516f3d637f50dadc22ae93999"},
+ {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9738b0f2e6c70f44ee0de53f2089d6002b10c33264abee07bdb5c7f03038303"},
+ {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:787cf23e5a0cde753f2eabac1b2e73ae3844eb873fd1f5bdbff3048d8dbb7604"},
+ {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:174899023337b9fc685ac8adaa7b047050616136ccd30e9070627c1aaab53a13"},
+ {file = "pydantic-1.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:1954f8778489a04b245a1e7b8b22a9d3ea8ef49337285693cf6959e4b757535e"},
+ {file = "pydantic-1.10.11-py3-none-any.whl", hash = "sha256:008c5e266c8aada206d0627a011504e14268a62091450210eda7c07fabe6963e"},
+ {file = "pydantic-1.10.11.tar.gz", hash = "sha256:f66d479cf7eb331372c470614be6511eae96f1f120344c25f3f9bb59fb1b5528"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.2.0"
+
+[package.extras]
+dotenv = ["python-dotenv (>=0.10.4)"]
+email = ["email-validator (>=1.0.3)"]
+
+[[package]]
+name = "pymupdf"
+version = "1.22.5"
+description = "Python bindings for the PDF toolkit and renderer MuPDF"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "PyMuPDF-1.22.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:640b8e4cb116dd87a3c854e49808a4f63625e663a7bc5b1efc971db5b4775367"},
+ {file = "PyMuPDF-1.22.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:17efbbf0e2d99d24cfc302fac512928eb294f10b7b67d597d04dafd012812e4e"},
+ {file = "PyMuPDF-1.22.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc9b9bf0f2beea3911750d2d66247608be8cbad33b7a050cacec9e4c105a1ca"},
+ {file = "PyMuPDF-1.22.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7734a32a91eea4b502b8f9d2915cdba0a372226e14fb983876d763110dcefef"},
+ {file = "PyMuPDF-1.22.5-cp310-cp310-win32.whl", hash = "sha256:c2fd70ca9961f7871810dce1b7d0a42a69eb8ff2d786621123952bd505a6867e"},
+ {file = "PyMuPDF-1.22.5-cp310-cp310-win_amd64.whl", hash = "sha256:add310c96df6933cfb4ce3821c9c7b5c133e8aa609a4c9416e1c7af546163488"},
+ {file = "PyMuPDF-1.22.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:017aaba511526facfc928e9d95d2c10d28a2821b05b9039bf422031a7da8584e"},
+ {file = "PyMuPDF-1.22.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6fe5e44a14864d921fb96669a82f9635846806176f77f1d73c61feb84ebf4d84"},
+ {file = "PyMuPDF-1.22.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e74d766f79e41e10c51865233042ab2cc4612ca7942812dca0603f4d0f8f73d"},
+ {file = "PyMuPDF-1.22.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8175452fcc99a0af6429d8acd87682a3a70c5879d73532c7327f71ce508a35"},
+ {file = "PyMuPDF-1.22.5-cp311-cp311-win32.whl", hash = "sha256:42f59f4999d7f8b35c850050bd965e98c081a7d9b92d5f9dcf30203b30d06876"},
+ {file = "PyMuPDF-1.22.5-cp311-cp311-win_amd64.whl", hash = "sha256:3d71c47aa14b73f2df7d03be8c547a05df6c6898d8c63a0f752b26f206eefd3c"},
+ {file = "PyMuPDF-1.22.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4bcad7ea4b3ab82c46fe8da27ec738d38c213ed9935ef67d98ed09574d9a234e"},
+ {file = "PyMuPDF-1.22.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b04a83ddcb3f7c935c75a1f7f6050c85fe4062a2ea64c47ee6bda788d037761"},
+ {file = "PyMuPDF-1.22.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d02ee28663077f15d529b04d27588b174fa937daf73a294df279bbf70c468f5c"},
+ {file = "PyMuPDF-1.22.5-cp37-cp37m-win32.whl", hash = "sha256:411fc35f6dae16ec940b6b0406e84be6ff29f93b30908ea1427e2a4bd594d4ba"},
+ {file = "PyMuPDF-1.22.5-cp37-cp37m-win_amd64.whl", hash = "sha256:7c8c0f686865e330de90b93d53b100f7f07c2f10f5449ceb721121f459f7cc4a"},
+ {file = "PyMuPDF-1.22.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:64ae9f81b8fe0a3e6386a24887a92736793479c5918ecac3b7deac2d02abf1f2"},
+ {file = "PyMuPDF-1.22.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7562436dadf8382e59ac3739fbbf9d5b2d807fafc7f28cb884863430e0de6505"},
+ {file = "PyMuPDF-1.22.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c22046e5f2cf0d72f9809a967340db1b238fefe58322896bc7c3f3d1d10b42"},
+ {file = "PyMuPDF-1.22.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efa601dc4116c17a6b09255b031b5a1891e3ac18b50ec536452a725a6b75db8d"},
+ {file = "PyMuPDF-1.22.5-cp38-cp38-win32.whl", hash = "sha256:3d0fe749e648f5245059d5f771fb50c1a988a1d2e82268b56377b2176a9fee5d"},
+ {file = "PyMuPDF-1.22.5-cp38-cp38-win_amd64.whl", hash = "sha256:4fbc5bfe6ecc53929e3fd0db9846fb7da084ddb4b1fc1063857245fa783974d9"},
+ {file = "PyMuPDF-1.22.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:87b36e0797ab7fbb7ef594c7a6e0febc7ffb4101a42ea796726a8288391a3769"},
+ {file = "PyMuPDF-1.22.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:01119edb7e4c3dd8c154d237b8ac927bd359eea8d31468f9a89aa308b5bca04e"},
+ {file = "PyMuPDF-1.22.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde02fcb387863873b56730f4b9f65515d87c92c12299f0f0a74b3ccdfe35062"},
+ {file = "PyMuPDF-1.22.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30c55814bbf6461aef9b34cb524d1d14857d5ec6ccfbb78ecfb1d07dfc40eeb8"},
+ {file = "PyMuPDF-1.22.5-cp39-cp39-win32.whl", hash = "sha256:0542178c3a399282903705a8cc298e7f33f4770605e0a9db344aff5d375bcf0b"},
+ {file = "PyMuPDF-1.22.5-cp39-cp39-win_amd64.whl", hash = "sha256:f8ca46a6987e14f58ec8dfda2d2376bacd113c1fec5f58bebf90838bb4408ab9"},
+ {file = "PyMuPDF-1.22.5.tar.gz", hash = "sha256:5ec8d5106752297529d0d68d46cfc4ce99914aabd99be843f1599a1842d63fe9"},
+]
+
+[[package]]
+name = "pypandoc"
+version = "1.11"
+description = "Thin wrapper for pandoc."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "pypandoc-1.11-py3-none-any.whl", hash = "sha256:b260596934e9cfc6513056110a7c8600171d414f90558bf4407e68b209be8007"},
+ {file = "pypandoc-1.11.tar.gz", hash = "sha256:7f6d68db0e57e0f6961bec2190897118c4d305fc2d31c22cd16037f22ee084a5"},
+]
+
+[[package]]
+name = "pyparsing"
+version = "2.4.7"
+description = "Python parsing module"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
+ {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
+]
+
+[[package]]
+name = "pyreadline3"
+version = "3.4.1"
+description = "A python implementation of GNU readline."
+optional = false
+python-versions = "*"
+files = [
+ {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"},
+ {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"},
+]
+
+[[package]]
+name = "python-dateutil"
+version = "2.8.2"
+description = "Extensions to the standard Python datetime module"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+files = [
+ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
+ {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
+[[package]]
+name = "python-docx"
+version = "0.8.11"
+description = "Create and update Microsoft Word .docx files."
+optional = false
+python-versions = "*"
+files = [
+ {file = "python-docx-0.8.11.tar.gz", hash = "sha256:1105d233a0956dd8dd1e710d20b159e2d72ac3c301041b95f4d4ceb3e0ebebc4"},
+]
+
+[package.dependencies]
+lxml = ">=2.3.2"
+
+[[package]]
+name = "python-dotenv"
+version = "1.0.0"
+description = "Read key-value pairs from a .env file and set them as environment variables"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"},
+ {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"},
+]
+
+[package.extras]
+cli = ["click (>=5.0)"]
+
+[[package]]
+name = "python-magic"
+version = "0.4.27"
+description = "File type identification using libmagic"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"},
+ {file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"},
+]
+
+[[package]]
+name = "python-pptx"
+version = "0.6.21"
+description = "Generate and manipulate Open XML PowerPoint (.pptx) files"
+optional = false
+python-versions = "*"
+files = [
+ {file = "python-pptx-0.6.21.tar.gz", hash = "sha256:7798a2aaf89563565b3c7120c0acfe9aff775db0db3580544e3bf4840c2e378f"},
+]
+
+[package.dependencies]
+lxml = ">=3.1.0"
+Pillow = ">=3.3.2"
+XlsxWriter = ">=0.5.7"
+
+[[package]]
+name = "pytz"
+version = "2023.3"
+description = "World timezone definitions, modern and historical"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"},
+ {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"},
+]
+
+[[package]]
+name = "pywin32"
+version = "306"
+description = "Python for Window Extensions"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"},
+ {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"},
+ {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"},
+ {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"},
+ {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"},
+ {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"},
+ {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"},
+ {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"},
+ {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"},
+ {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"},
+ {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"},
+ {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"},
+ {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"},
+ {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"},
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0"
+description = "YAML parser and emitter for Python"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
+ {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
+ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
+ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
+ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
+ {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
+ {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
+ {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"},
+ {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"},
+ {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"},
+ {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"},
+ {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"},
+ {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"},
+ {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"},
+ {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
+ {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
+ {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
+ {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
+ {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
+ {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
+ {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
+ {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
+ {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
+ {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
+ {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
+ {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
+ {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
+ {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
+ {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
+ {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
+ {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
+ {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
+ {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
+ {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
+ {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
+ {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
+ {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
+ {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
+ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
+ {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
+]
+
+[[package]]
+name = "red-black-tree-mod"
+version = "1.20"
+description = "Flexible python implementation of red black trees"
+optional = false
+python-versions = "*"
+files = [
+ {file = "red-black-tree-mod-1.20.tar.gz", hash = "sha256:2448e6fc9cbf1be204c753f352c6ee49aa8156dbf1faa57dfc26bd7705077e0a"},
+]
+
+[[package]]
+name = "regex"
+version = "2023.6.3"
+description = "Alternative regular expression module, to replace re."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "regex-2023.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:824bf3ac11001849aec3fa1d69abcb67aac3e150a933963fb12bda5151fe1bfd"},
+ {file = "regex-2023.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05ed27acdf4465c95826962528f9e8d41dbf9b1aa8531a387dee6ed215a3e9ef"},
+ {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b49c764f88a79160fa64f9a7b425620e87c9f46095ef9c9920542ab2495c8bc"},
+ {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e3f1316c2293e5469f8f09dc2d76efb6c3982d3da91ba95061a7e69489a14ef"},
+ {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43e1dd9d12df9004246bacb79a0e5886b3b6071b32e41f83b0acbf293f820ee8"},
+ {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4959e8bcbfda5146477d21c3a8ad81b185cd252f3d0d6e4724a5ef11c012fb06"},
+ {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4dd387354dc83a3bff67127a124c21116feb0d2ef536805c454721c5d7993d"},
+ {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2239d95d8e243658b8dbb36b12bd10c33ad6e6933a54d36ff053713f129aa536"},
+ {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:890e5a11c97cf0d0c550eb661b937a1e45431ffa79803b942a057c4fb12a2da2"},
+ {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a8105e9af3b029f243ab11ad47c19b566482c150c754e4c717900a798806b222"},
+ {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:25be746a8ec7bc7b082783216de8e9473803706723b3f6bef34b3d0ed03d57e2"},
+ {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3676f1dd082be28b1266c93f618ee07741b704ab7b68501a173ce7d8d0d0ca18"},
+ {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10cb847aeb1728412c666ab2e2000ba6f174f25b2bdc7292e7dd71b16db07568"},
+ {file = "regex-2023.6.3-cp310-cp310-win32.whl", hash = "sha256:dbbbfce33cd98f97f6bffb17801b0576e653f4fdb1d399b2ea89638bc8d08ae1"},
+ {file = "regex-2023.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:c5f8037000eb21e4823aa485149f2299eb589f8d1fe4b448036d230c3f4e68e0"},
+ {file = "regex-2023.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c123f662be8ec5ab4ea72ea300359023a5d1df095b7ead76fedcd8babbedf969"},
+ {file = "regex-2023.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9edcbad1f8a407e450fbac88d89e04e0b99a08473f666a3f3de0fd292badb6aa"},
+ {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcba6dae7de533c876255317c11f3abe4907ba7d9aa15d13e3d9710d4315ec0e"},
+ {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29cdd471ebf9e0f2fb3cac165efedc3c58db841d83a518b082077e612d3ee5df"},
+ {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b74fbbf6cbbf9dbce20eb9b5879469e97aeeaa874145517563cca4029db65c"},
+ {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c29ca1bd61b16b67be247be87390ef1d1ef702800f91fbd1991f5c4421ebae8"},
+ {file = "regex-2023.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77f09bc4b55d4bf7cc5eba785d87001d6757b7c9eec237fe2af57aba1a071d9"},
+ {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ea353ecb6ab5f7e7d2f4372b1e779796ebd7b37352d290096978fea83c4dba0c"},
+ {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:10590510780b7541969287512d1b43f19f965c2ece6c9b1c00fc367b29d8dce7"},
+ {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2fbd6236aae3b7f9d514312cdb58e6494ee1c76a9948adde6eba33eb1c4264f"},
+ {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:6b2675068c8b56f6bfd5a2bda55b8accbb96c02fd563704732fd1c95e2083461"},
+ {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74419d2b50ecb98360cfaa2974da8689cb3b45b9deff0dcf489c0d333bcc1477"},
+ {file = "regex-2023.6.3-cp311-cp311-win32.whl", hash = "sha256:fb5ec16523dc573a4b277663a2b5a364e2099902d3944c9419a40ebd56a118f9"},
+ {file = "regex-2023.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:09e4a1a6acc39294a36b7338819b10baceb227f7f7dbbea0506d419b5a1dd8af"},
+ {file = "regex-2023.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0654bca0cdf28a5956c83839162692725159f4cda8d63e0911a2c0dc76166525"},
+ {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b6a3ceb5ca952e66550a4532cef94c9a0c80dc156c4cc343041951aec1697"},
+ {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87b2a5bb5e78ee0ad1de71c664d6eb536dc3947a46a69182a90f4410f5e3f7dd"},
+ {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6343c6928282c1f6a9db41f5fd551662310e8774c0e5ebccb767002fcf663ca9"},
+ {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6192d5af2ccd2a38877bfef086d35e6659566a335b1492786ff254c168b1693"},
+ {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74390d18c75054947e4194019077e243c06fbb62e541d8817a0fa822ea310c14"},
+ {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:742e19a90d9bb2f4a6cf2862b8b06dea5e09b96c9f2df1779e53432d7275331f"},
+ {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8abbc5d54ea0ee80e37fef009e3cec5dafd722ed3c829126253d3e22f3846f1e"},
+ {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c2b867c17a7a7ae44c43ebbeb1b5ff406b3e8d5b3e14662683e5e66e6cc868d3"},
+ {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d831c2f8ff278179705ca59f7e8524069c1a989e716a1874d6d1aab6119d91d1"},
+ {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ee2d1a9a253b1729bb2de27d41f696ae893507c7db224436abe83ee25356f5c1"},
+ {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:61474f0b41fe1a80e8dfa70f70ea1e047387b7cd01c85ec88fa44f5d7561d787"},
+ {file = "regex-2023.6.3-cp36-cp36m-win32.whl", hash = "sha256:0b71e63226e393b534105fcbdd8740410dc6b0854c2bfa39bbda6b0d40e59a54"},
+ {file = "regex-2023.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bbb02fd4462f37060122e5acacec78e49c0fbb303c30dd49c7f493cf21fc5b27"},
+ {file = "regex-2023.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b862c2b9d5ae38a68b92e215b93f98d4c5e9454fa36aae4450f61dd33ff48487"},
+ {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:976d7a304b59ede34ca2921305b57356694f9e6879db323fd90a80f865d355a3"},
+ {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:83320a09188e0e6c39088355d423aa9d056ad57a0b6c6381b300ec1a04ec3d16"},
+ {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9427a399501818a7564f8c90eced1e9e20709ece36be701f394ada99890ea4b3"},
+ {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178bbc1b2ec40eaca599d13c092079bf529679bf0371c602edaa555e10b41c3"},
+ {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:837328d14cde912af625d5f303ec29f7e28cdab588674897baafaf505341f2fc"},
+ {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d44dc13229905ae96dd2ae2dd7cebf824ee92bc52e8cf03dcead37d926da019"},
+ {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d54af539295392611e7efbe94e827311eb8b29668e2b3f4cadcfe6f46df9c777"},
+ {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7117d10690c38a622e54c432dfbbd3cbd92f09401d622902c32f6d377e2300ee"},
+ {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bb60b503ec8a6e4e3e03a681072fa3a5adcbfa5479fa2d898ae2b4a8e24c4591"},
+ {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:65ba8603753cec91c71de423a943ba506363b0e5c3fdb913ef8f9caa14b2c7e0"},
+ {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:271f0bdba3c70b58e6f500b205d10a36fb4b58bd06ac61381b68de66442efddb"},
+ {file = "regex-2023.6.3-cp37-cp37m-win32.whl", hash = "sha256:9beb322958aaca059f34975b0df135181f2e5d7a13b84d3e0e45434749cb20f7"},
+ {file = "regex-2023.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fea75c3710d4f31389eed3c02f62d0b66a9da282521075061ce875eb5300cf23"},
+ {file = "regex-2023.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f56fcb7ff7bf7404becdfc60b1e81a6d0561807051fd2f1860b0d0348156a07"},
+ {file = "regex-2023.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d2da3abc88711bce7557412310dfa50327d5769a31d1c894b58eb256459dc289"},
+ {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99b50300df5add73d307cf66abea093304a07eb017bce94f01e795090dea87c"},
+ {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5708089ed5b40a7b2dc561e0c8baa9535b77771b64a8330b684823cfd5116036"},
+ {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:687ea9d78a4b1cf82f8479cab23678aff723108df3edeac098e5b2498879f4a7"},
+ {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3850beab9f527f06ccc94b446c864059c57651b3f911fddb8d9d3ec1d1b25d"},
+ {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8915cc96abeb8983cea1df3c939e3c6e1ac778340c17732eb63bb96247b91d2"},
+ {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:841d6e0e5663d4c7b4c8099c9997be748677d46cbf43f9f471150e560791f7ff"},
+ {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9edce5281f965cf135e19840f4d93d55b3835122aa76ccacfd389e880ba4cf82"},
+ {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b956231ebdc45f5b7a2e1f90f66a12be9610ce775fe1b1d50414aac1e9206c06"},
+ {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:36efeba71c6539d23c4643be88295ce8c82c88bbd7c65e8a24081d2ca123da3f"},
+ {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cf67ca618b4fd34aee78740bea954d7c69fdda419eb208c2c0c7060bb822d747"},
+ {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b4598b1897837067a57b08147a68ac026c1e73b31ef6e36deeeb1fa60b2933c9"},
+ {file = "regex-2023.6.3-cp38-cp38-win32.whl", hash = "sha256:f415f802fbcafed5dcc694c13b1292f07fe0befdb94aa8a52905bd115ff41e88"},
+ {file = "regex-2023.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:d4f03bb71d482f979bda92e1427f3ec9b220e62a7dd337af0aa6b47bf4498f72"},
+ {file = "regex-2023.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccf91346b7bd20c790310c4147eee6ed495a54ddb6737162a36ce9dbef3e4751"},
+ {file = "regex-2023.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b28f5024a3a041009eb4c333863d7894d191215b39576535c6734cd88b0fcb68"},
+ {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0bb18053dfcfed432cc3ac632b5e5e5c5b7e55fb3f8090e867bfd9b054dbcbf"},
+ {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5bfb3004f2144a084a16ce19ca56b8ac46e6fd0651f54269fc9e230edb5e4a"},
+ {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c6b48d0fa50d8f4df3daf451be7f9689c2bde1a52b1225c5926e3f54b6a9ed1"},
+ {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051da80e6eeb6e239e394ae60704d2b566aa6a7aed6f2890a7967307267a5dc6"},
+ {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4c3b7fa4cdaa69268748665a1a6ff70c014d39bb69c50fda64b396c9116cf77"},
+ {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:457b6cce21bee41ac292d6753d5e94dcbc5c9e3e3a834da285b0bde7aa4a11e9"},
+ {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aad51907d74fc183033ad796dd4c2e080d1adcc4fd3c0fd4fd499f30c03011cd"},
+ {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0385e73da22363778ef2324950e08b689abdf0b108a7d8decb403ad7f5191938"},
+ {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c6a57b742133830eec44d9b2290daf5cbe0a2f1d6acee1b3c7b1c7b2f3606df7"},
+ {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3e5219bf9e75993d73ab3d25985c857c77e614525fac9ae02b1bebd92f7cecac"},
+ {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e5087a3c59eef624a4591ef9eaa6e9a8d8a94c779dade95d27c0bc24650261cd"},
+ {file = "regex-2023.6.3-cp39-cp39-win32.whl", hash = "sha256:20326216cc2afe69b6e98528160b225d72f85ab080cbdf0b11528cbbaba2248f"},
+ {file = "regex-2023.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:bdff5eab10e59cf26bc479f565e25ed71a7d041d1ded04ccf9aee1d9f208487a"},
+ {file = "regex-2023.6.3.tar.gz", hash = "sha256:72d1a25bf36d2050ceb35b517afe13864865268dfb45910e2e17a84be6cbfeb0"},
+]
+
+[[package]]
+name = "requests"
+version = "2.31.0"
+description = "Python HTTP for Humans."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
+ {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
+]
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+charset-normalizer = ">=2,<4"
+idna = ">=2.5,<4"
+urllib3 = ">=1.21.1,<3"
+
+[package.extras]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+
+[[package]]
+name = "rtfde"
+version = "0.0.2"
+description = "A library for extracting HTML content from RTF encapsulated HTML as commonly found in the exchange MSG email format."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "RTFDE-0.0.2-py3-none-any.whl", hash = "sha256:18386e4f060cee12a2a8035b0acf0cc99689f5dff1bf347bab7e92351860a21d"},
+ {file = "RTFDE-0.0.2.tar.gz", hash = "sha256:b86b5d734950fe8745a5b89133f50554252dbd67c6d1b9265e23ee140e7ea8a2"},
+]
+
+[package.dependencies]
+lark-parser = ">=0.11"
+oletools = ">=0.56"
+
+[package.extras]
+dev = ["lxml (>=4.6)"]
+msg-parse = ["extract-msg (>=0.27)"]
+
+[[package]]
+name = "safetensors"
+version = "0.3.2"
+description = "Fast and Safe Tensor serialization"
+optional = false
+python-versions = "*"
+files = [
+ {file = "safetensors-0.3.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b6a66989075c2891d743153e8ba9ca84ee7232c8539704488f454199b8b8f84d"},
+ {file = "safetensors-0.3.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:670d6bc3a3b377278ce2971fa7c36ebc0a35041c4ea23b9df750a39380800195"},
+ {file = "safetensors-0.3.2-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:564f42838721925b5313ae864ba6caa6f4c80a9fbe63cf24310c3be98ab013cd"},
+ {file = "safetensors-0.3.2-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:7f80af7e4ab3188daaff12d43d078da3017a90d732d38d7af4eb08b6ca2198a5"},
+ {file = "safetensors-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec30d78f20f1235b252d59cbb9755beb35a1fde8c24c89b3c98e6a1804cfd432"},
+ {file = "safetensors-0.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16063d94d8f600768d3c331b1e97964b1bf3772e19710105fe24ec5a6af63770"},
+ {file = "safetensors-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb44e140bf2aeda98d9dde669dbec15f7b77f96a9274469b91a6cf4bcc5ec3b"},
+ {file = "safetensors-0.3.2-cp310-cp310-win32.whl", hash = "sha256:2961c1243fd0da46aa6a1c835305cc4595486f8ac64632a604d0eb5f2de76175"},
+ {file = "safetensors-0.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c813920482c337d1424d306e1b05824a38e3ef94303748a0a287dea7a8c4f805"},
+ {file = "safetensors-0.3.2-cp311-cp311-macosx_10_11_universal2.whl", hash = "sha256:707df34bd9b9047e97332136ad98e57028faeccdb9cfe1c3b52aba5964cc24bf"},
+ {file = "safetensors-0.3.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:becc5bb85b2947eae20ed23b407ebfd5277d9a560f90381fe2c42e6c043677ba"},
+ {file = "safetensors-0.3.2-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:30a75707be5cc9686490bde14b9a371cede4af53244ea72b340cfbabfffdf58a"},
+ {file = "safetensors-0.3.2-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:54ad6af663e15e2b99e2ea3280981b7514485df72ba6d014dc22dae7ba6a5e6c"},
+ {file = "safetensors-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37764b3197656ef507a266c453e909a3477dabc795962b38e3ad28226f53153b"},
+ {file = "safetensors-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4939067736783acd8391d83cd97d6c202f94181951ce697d519f9746381b6a39"},
+ {file = "safetensors-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada0fac127ff8fb04834da5c6d85a8077e6a1c9180a11251d96f8068db922a17"},
+ {file = "safetensors-0.3.2-cp311-cp311-win32.whl", hash = "sha256:155b82dbe2b0ebff18cde3f76b42b6d9470296e92561ef1a282004d449fa2b4c"},
+ {file = "safetensors-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:a86428d196959619ce90197731be9391b5098b35100a7228ef4643957648f7f5"},
+ {file = "safetensors-0.3.2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:c1f8ab41ed735c5b581f451fd15d9602ff51aa88044bfa933c5fa4b1d0c644d1"},
+ {file = "safetensors-0.3.2-cp37-cp37m-macosx_13_0_x86_64.whl", hash = "sha256:bc9cfb3c9ea2aec89685b4d656f9f2296f0f0d67ecf2bebf950870e3be89b3db"},
+ {file = "safetensors-0.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ace5d471e3d78e0d93f952707d808b5ab5eac77ddb034ceb702e602e9acf2be9"},
+ {file = "safetensors-0.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de3e20a388b444381bcda1a3193cce51825ddca277e4cf3ed1fe8d9b2d5722cd"},
+ {file = "safetensors-0.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d7d70d48585fe8df00725aa788f2e64fd24a4c9ae07cd6be34f6859d0f89a9c"},
+ {file = "safetensors-0.3.2-cp37-cp37m-win32.whl", hash = "sha256:6ff59bc90cdc857f68b1023be9085fda6202bbe7f2fd67d06af8f976d6adcc10"},
+ {file = "safetensors-0.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8b05c93da15fa911763a89281906ca333ed800ab0ef1c7ce53317aa1a2322f19"},
+ {file = "safetensors-0.3.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:8969cfd9e8d904e8d3c67c989e1bd9a95e3cc8980d4f95e4dcd43c299bb94253"},
+ {file = "safetensors-0.3.2-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:f54148ac027556eb02187e9bc1556c4d916c99ca3cb34ca36a7d304d675035c1"},
+ {file = "safetensors-0.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caec25fedbcf73f66c9261984f07885680f71417fc173f52279276c7f8a5edd3"},
+ {file = "safetensors-0.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:50224a1d99927ccf3b75e27c3d412f7043280431ab100b4f08aad470c37cf99a"},
+ {file = "safetensors-0.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa98f49e95f02eb750d32c4947e7d5aa43883149ebd0414920866446525b70f0"},
+ {file = "safetensors-0.3.2-cp38-cp38-win32.whl", hash = "sha256:33409df5e28a83dc5cc5547a3ac17c0f1b13a1847b1eb3bc4b3be0df9915171e"},
+ {file = "safetensors-0.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:e04a7cbbb3856159ab99e3adb14521544f65fcb8548cce773a1435a0f8d78d27"},
+ {file = "safetensors-0.3.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:7c864cf5dcbfb608c5378f83319c60cc9c97263343b57c02756b7613cd5ab4dd"},
+ {file = "safetensors-0.3.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:14e8c19d6dc51d4f70ee33c46aff04c8ba3f95812e74daf8036c24bc86e75cae"},
+ {file = "safetensors-0.3.2-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:042a60f633c3c7009fdf6a7c182b165cb7283649d2a1e9c7a4a1c23454bd9a5b"},
+ {file = "safetensors-0.3.2-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:fafd95e5ef41e8f312e2a32b7031f7b9b2a621b255f867b221f94bb2e9f51ae8"},
+ {file = "safetensors-0.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ed77cf358abce2307f03634694e0b2a29822e322a1623e0b1aa4b41e871bf8b"},
+ {file = "safetensors-0.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d344e8b2681a33aafc197c90b0def3229b3317d749531c72fa6259d0caa5c8c"},
+ {file = "safetensors-0.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87ff0024ef2e5722a79af24688ce4a430f70601d0cf712a744105ed4b8f67ba5"},
+ {file = "safetensors-0.3.2-cp39-cp39-win32.whl", hash = "sha256:827af9478b78977248ba93e2fd97ea307fb63f463f80cef4824460f8c2542a52"},
+ {file = "safetensors-0.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9b09f27c456efa301f98681ea14b12f81f2637889f6336223ccab71e42c34541"},
+ {file = "safetensors-0.3.2.tar.gz", hash = "sha256:2dbd34554ed3b99435a0e84df077108f5334c8336b5ed9cb8b6b98f7b10da2f6"},
+]
+
+[package.extras]
+all = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (==2.11.0)", "torch (>=1.10)"]
+dev = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (==2.11.0)", "torch (>=1.10)"]
+jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)"]
+numpy = ["numpy (>=1.21.6)"]
+paddlepaddle = ["paddlepaddle (>=2.4.1)"]
+pinned-tf = ["tensorflow (==2.11.0)"]
+quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"]
+tensorflow = ["tensorflow (>=2.11.0)"]
+testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "numpy (>=1.21.6)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)"]
+torch = ["torch (>=1.10)"]
+
+[[package]]
+name = "scikit-learn"
+version = "1.3.0"
+description = "A set of python modules for machine learning and data mining"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "scikit-learn-1.3.0.tar.gz", hash = "sha256:8be549886f5eda46436b6e555b0e4873b4f10aa21c07df45c4bc1735afbccd7a"},
+ {file = "scikit_learn-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:981287869e576d42c682cf7ca96af0c6ac544ed9316328fd0d9292795c742cf5"},
+ {file = "scikit_learn-1.3.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:436aaaae2c916ad16631142488e4c82f4296af2404f480e031d866863425d2a2"},
+ {file = "scikit_learn-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7e28d8fa47a0b30ae1bd7a079519dd852764e31708a7804da6cb6f8b36e3630"},
+ {file = "scikit_learn-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae80c08834a473d08a204d966982a62e11c976228d306a2648c575e3ead12111"},
+ {file = "scikit_learn-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:552fd1b6ee22900cf1780d7386a554bb96949e9a359999177cf30211e6b20df6"},
+ {file = "scikit_learn-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79970a6d759eb00a62266a31e2637d07d2d28446fca8079cf9afa7c07b0427f8"},
+ {file = "scikit_learn-1.3.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:850a00b559e636b23901aabbe79b73dc604b4e4248ba9e2d6e72f95063765603"},
+ {file = "scikit_learn-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee04835fb016e8062ee9fe9074aef9b82e430504e420bff51e3e5fffe72750ca"},
+ {file = "scikit_learn-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d953531f5d9f00c90c34fa3b7d7cfb43ecff4c605dac9e4255a20b114a27369"},
+ {file = "scikit_learn-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:151ac2bf65ccf363664a689b8beafc9e6aae36263db114b4ca06fbbbf827444a"},
+ {file = "scikit_learn-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a885a9edc9c0a341cab27ec4f8a6c58b35f3d449c9d2503a6fd23e06bbd4f6a"},
+ {file = "scikit_learn-1.3.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:9877af9c6d1b15486e18a94101b742e9d0d2f343d35a634e337411ddb57783f3"},
+ {file = "scikit_learn-1.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c470f53cea065ff3d588050955c492793bb50c19a92923490d18fcb637f6383a"},
+ {file = "scikit_learn-1.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd6e2d7389542eae01077a1ee0318c4fec20c66c957f45c7aac0c6eb0fe3c612"},
+ {file = "scikit_learn-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:3a11936adbc379a6061ea32fa03338d4ca7248d86dd507c81e13af428a5bc1db"},
+ {file = "scikit_learn-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:998d38fcec96584deee1e79cd127469b3ad6fefd1ea6c2dfc54e8db367eb396b"},
+ {file = "scikit_learn-1.3.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ded35e810438a527e17623ac6deae3b360134345b7c598175ab7741720d7ffa7"},
+ {file = "scikit_learn-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e8102d5036e28d08ab47166b48c8d5e5810704daecf3a476a4282d562be9a28"},
+ {file = "scikit_learn-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7617164951c422747e7c32be4afa15d75ad8044f42e7d70d3e2e0429a50e6718"},
+ {file = "scikit_learn-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:1d54fb9e6038284548072df22fd34777e434153f7ffac72c8596f2d6987110dd"},
+]
+
+[package.dependencies]
+joblib = ">=1.1.1"
+numpy = ">=1.17.3"
+scipy = ">=1.5.0"
+threadpoolctl = ">=2.0.0"
+
+[package.extras]
+benchmark = ["matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "pandas (>=1.0.5)"]
+docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.10.1)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"]
+examples = ["matplotlib (>=3.1.3)", "pandas (>=1.0.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)"]
+tests = ["black (>=23.3.0)", "matplotlib (>=3.1.3)", "mypy (>=1.3)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.0.272)", "scikit-image (>=0.16.2)"]
+
+[[package]]
+name = "scipy"
+version = "1.9.3"
+description = "Fundamental algorithms for scientific computing in Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "scipy-1.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1884b66a54887e21addf9c16fb588720a8309a57b2e258ae1c7986d4444d3bc0"},
+ {file = "scipy-1.9.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:83b89e9586c62e787f5012e8475fbb12185bafb996a03257e9675cd73d3736dd"},
+ {file = "scipy-1.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a72d885fa44247f92743fc20732ae55564ff2a519e8302fb7e18717c5355a8b"},
+ {file = "scipy-1.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d01e1dd7b15bd2449c8bfc6b7cc67d630700ed655654f0dfcf121600bad205c9"},
+ {file = "scipy-1.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:68239b6aa6f9c593da8be1509a05cb7f9efe98b80f43a5861cd24c7557e98523"},
+ {file = "scipy-1.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b41bc822679ad1c9a5f023bc93f6d0543129ca0f37c1ce294dd9d386f0a21096"},
+ {file = "scipy-1.9.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:90453d2b93ea82a9f434e4e1cba043e779ff67b92f7a0e85d05d286a3625df3c"},
+ {file = "scipy-1.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c06e62a390a9167da60bedd4575a14c1f58ca9dfde59830fc42e5197283dab"},
+ {file = "scipy-1.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abaf921531b5aeaafced90157db505e10345e45038c39e5d9b6c7922d68085cb"},
+ {file = "scipy-1.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:06d2e1b4c491dc7d8eacea139a1b0b295f74e1a1a0f704c375028f8320d16e31"},
+ {file = "scipy-1.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a04cd7d0d3eff6ea4719371cbc44df31411862b9646db617c99718ff68d4840"},
+ {file = "scipy-1.9.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:545c83ffb518094d8c9d83cce216c0c32f8c04aaf28b92cc8283eda0685162d5"},
+ {file = "scipy-1.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d54222d7a3ba6022fdf5773931b5d7c56efe41ede7f7128c7b1637700409108"},
+ {file = "scipy-1.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cff3a5295234037e39500d35316a4c5794739433528310e117b8a9a0c76d20fc"},
+ {file = "scipy-1.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:2318bef588acc7a574f5bfdff9c172d0b1bf2c8143d9582e05f878e580a3781e"},
+ {file = "scipy-1.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d644a64e174c16cb4b2e41dfea6af722053e83d066da7343f333a54dae9bc31c"},
+ {file = "scipy-1.9.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:da8245491d73ed0a994ed9c2e380fd058ce2fa8a18da204681f2fe1f57f98f95"},
+ {file = "scipy-1.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4db5b30849606a95dcf519763dd3ab6fe9bd91df49eba517359e450a7d80ce2e"},
+ {file = "scipy-1.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c68db6b290cbd4049012990d7fe71a2abd9ffbe82c0056ebe0f01df8be5436b0"},
+ {file = "scipy-1.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:5b88e6d91ad9d59478fafe92a7c757d00c59e3bdc3331be8ada76a4f8d683f58"},
+ {file = "scipy-1.9.3.tar.gz", hash = "sha256:fbc5c05c85c1a02be77b1ff591087c83bc44579c6d2bd9fb798bb64ea5e1a027"},
+]
+
+[package.dependencies]
+numpy = ">=1.18.5,<1.26.0"
+
+[package.extras]
+dev = ["flake8", "mypy", "pycodestyle", "typing_extensions"]
+doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-panels (>=0.5.2)", "sphinx-tabs"]
+test = ["asv", "gmpy2", "mpmath", "pytest", "pytest-cov", "pytest-xdist", "scikit-umfpack", "threadpoolctl"]
+
+[[package]]
+name = "sentence-transformers"
+version = "2.2.2"
+description = "Multilingual text embeddings"
+optional = false
+python-versions = ">=3.6.0"
+files = [
+ {file = "sentence-transformers-2.2.2.tar.gz", hash = "sha256:dbc60163b27de21076c9a30d24b5b7b6fa05141d68cf2553fa9a77bf79a29136"},
+]
+
+[package.dependencies]
+huggingface-hub = ">=0.4.0"
+nltk = "*"
+numpy = "*"
+scikit-learn = "*"
+scipy = "*"
+sentencepiece = "*"
+torch = ">=1.6.0"
+torchvision = "*"
+tqdm = "*"
+transformers = ">=4.6.0,<5.0.0"
+
+[[package]]
+name = "sentencepiece"
+version = "0.1.99"
+description = "SentencePiece python wrapper"
+optional = false
+python-versions = "*"
+files = [
+ {file = "sentencepiece-0.1.99-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0eb528e70571b7c02723e5804322469b82fe7ea418c96051d0286c0fa028db73"},
+ {file = "sentencepiece-0.1.99-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77d7fafb2c4e4659cbdf303929503f37a26eabc4ff31d3a79bf1c5a1b338caa7"},
+ {file = "sentencepiece-0.1.99-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be9cf5b9e404c245aeb3d3723c737ba7a8f5d4ba262ef233a431fa6c45f732a0"},
+ {file = "sentencepiece-0.1.99-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baed1a26464998f9710d20e52607c29ffd4293e7c71c6a1f83f51ad0911ec12c"},
+ {file = "sentencepiece-0.1.99-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9832f08bb372d4c8b567612f8eab9e36e268dff645f1c28f9f8e851be705f6d1"},
+ {file = "sentencepiece-0.1.99-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:019e7535108e309dae2b253a75834fc3128240aa87c00eb80732078cdc182588"},
+ {file = "sentencepiece-0.1.99-cp310-cp310-win32.whl", hash = "sha256:fa16a830416bb823fa2a52cbdd474d1f7f3bba527fd2304fb4b140dad31bb9bc"},
+ {file = "sentencepiece-0.1.99-cp310-cp310-win_amd64.whl", hash = "sha256:14b0eccb7b641d4591c3e12ae44cab537d68352e4d3b6424944f0c447d2348d5"},
+ {file = "sentencepiece-0.1.99-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6d3c56f24183a1e8bd61043ff2c58dfecdc68a5dd8955dc13bab83afd5f76b81"},
+ {file = "sentencepiece-0.1.99-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed6ea1819fd612c989999e44a51bf556d0ef6abfb553080b9be3d347e18bcfb7"},
+ {file = "sentencepiece-0.1.99-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2a0260cd1fb7bd8b4d4f39dc2444a8d5fd4e0a0c4d5c899810ef1abf99b2d45"},
+ {file = "sentencepiece-0.1.99-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a1abff4d1ff81c77cac3cc6fefa34fa4b8b371e5ee51cb7e8d1ebc996d05983"},
+ {file = "sentencepiece-0.1.99-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:004e6a621d4bc88978eecb6ea7959264239a17b70f2cbc348033d8195c9808ec"},
+ {file = "sentencepiece-0.1.99-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db361e03342c41680afae5807590bc88aa0e17cfd1a42696a160e4005fcda03b"},
+ {file = "sentencepiece-0.1.99-cp311-cp311-win32.whl", hash = "sha256:2d95e19168875b70df62916eb55428a0cbcb834ac51d5a7e664eda74def9e1e0"},
+ {file = "sentencepiece-0.1.99-cp311-cp311-win_amd64.whl", hash = "sha256:f90d73a6f81248a909f55d8e6ef56fec32d559e1e9af045f0b0322637cb8e5c7"},
+ {file = "sentencepiece-0.1.99-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:62e24c81e74bd87a6e0d63c51beb6527e4c0add67e1a17bac18bcd2076afcfeb"},
+ {file = "sentencepiece-0.1.99-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57efcc2d51caff20d9573567d9fd3f854d9efe613ed58a439c78c9f93101384a"},
+ {file = "sentencepiece-0.1.99-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a904c46197993bd1e95b93a6e373dca2f170379d64441041e2e628ad4afb16f"},
+ {file = "sentencepiece-0.1.99-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d89adf59854741c0d465f0e1525b388c0d174f611cc04af54153c5c4f36088c4"},
+ {file = "sentencepiece-0.1.99-cp36-cp36m-win32.whl", hash = "sha256:47c378146928690d1bc106fdf0da768cebd03b65dd8405aa3dd88f9c81e35dba"},
+ {file = "sentencepiece-0.1.99-cp36-cp36m-win_amd64.whl", hash = "sha256:9ba142e7a90dd6d823c44f9870abdad45e6c63958eb60fe44cca6828d3b69da2"},
+ {file = "sentencepiece-0.1.99-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b7b1a9ae4d7c6f1f867e63370cca25cc17b6f4886729595b885ee07a58d3cec3"},
+ {file = "sentencepiece-0.1.99-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0f644c9d4d35c096a538507b2163e6191512460035bf51358794a78515b74f7"},
+ {file = "sentencepiece-0.1.99-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8843d23a0f686d85e569bd6dcd0dd0e0cbc03731e63497ca6d5bacd18df8b85"},
+ {file = "sentencepiece-0.1.99-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33e6f690a1caebb4867a2e367afa1918ad35be257ecdb3455d2bbd787936f155"},
+ {file = "sentencepiece-0.1.99-cp37-cp37m-win32.whl", hash = "sha256:8a321866c2f85da7beac74a824b4ad6ddc2a4c9bccd9382529506d48f744a12c"},
+ {file = "sentencepiece-0.1.99-cp37-cp37m-win_amd64.whl", hash = "sha256:c42f753bcfb7661c122a15b20be7f684b61fc8592c89c870adf52382ea72262d"},
+ {file = "sentencepiece-0.1.99-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:85b476406da69c70586f0bb682fcca4c9b40e5059814f2db92303ea4585c650c"},
+ {file = "sentencepiece-0.1.99-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cfbcfe13c69d3f87b7fcd5da168df7290a6d006329be71f90ba4f56bc77f8561"},
+ {file = "sentencepiece-0.1.99-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:445b0ec381af1cd4eef95243e7180c63d9c384443c16c4c47a28196bd1cda937"},
+ {file = "sentencepiece-0.1.99-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6890ea0f2b4703f62d0bf27932e35808b1f679bdb05c7eeb3812b935ba02001"},
+ {file = "sentencepiece-0.1.99-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb71af492b0eefbf9f2501bec97bcd043b6812ab000d119eaf4bd33f9e283d03"},
+ {file = "sentencepiece-0.1.99-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27b866b5bd3ddd54166bbcbf5c8d7dd2e0b397fac8537991c7f544220b1f67bc"},
+ {file = "sentencepiece-0.1.99-cp38-cp38-win32.whl", hash = "sha256:b133e8a499eac49c581c3c76e9bdd08c338cc1939e441fee6f92c0ccb5f1f8be"},
+ {file = "sentencepiece-0.1.99-cp38-cp38-win_amd64.whl", hash = "sha256:0eaf3591dd0690a87f44f4df129cf8d05d8a4029b5b6709b489b8e27f9a9bcff"},
+ {file = "sentencepiece-0.1.99-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38efeda9bbfb55052d482a009c6a37e52f42ebffcea9d3a98a61de7aee356a28"},
+ {file = "sentencepiece-0.1.99-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6c030b081dc1e1bcc9fadc314b19b740715d3d566ad73a482da20d7d46fd444c"},
+ {file = "sentencepiece-0.1.99-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84dbe53e02e4f8a2e45d2ac3e430d5c83182142658e25edd76539b7648928727"},
+ {file = "sentencepiece-0.1.99-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b0f55d0a0ee1719b4b04221fe0c9f0c3461dc3dabd77a035fa2f4788eb3ef9a"},
+ {file = "sentencepiece-0.1.99-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e800f206cd235dc27dc749299e05853a4e4332e8d3dfd81bf13d0e5b9007d9"},
+ {file = "sentencepiece-0.1.99-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae1c40cda8f9d5b0423cfa98542735c0235e7597d79caf318855cdf971b2280"},
+ {file = "sentencepiece-0.1.99-cp39-cp39-win32.whl", hash = "sha256:c84ce33af12ca222d14a1cdd37bd76a69401e32bc68fe61c67ef6b59402f4ab8"},
+ {file = "sentencepiece-0.1.99-cp39-cp39-win_amd64.whl", hash = "sha256:350e5c74d739973f1c9643edb80f7cc904dc948578bcb1d43c6f2b173e5d18dd"},
+ {file = "sentencepiece-0.1.99.tar.gz", hash = "sha256:189c48f5cb2949288f97ccdb97f0473098d9c3dcf5a3d99d4eabe719ec27297f"},
+]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "sniffio"
+version = "1.3.0"
+description = "Sniff out which async library your code is running under"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"},
+ {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
+]
+
+[[package]]
+name = "soupsieve"
+version = "2.4.1"
+description = "A modern CSS selector implementation for Beautiful Soup."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"},
+ {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"},
+]
+
+[[package]]
+name = "sqlalchemy"
+version = "2.0.18"
+description = "Database Abstraction Library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "SQLAlchemy-2.0.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7ddd6d35c598af872f9a0a5bce7f7c4a1841684a72dab3302e3df7f17d1b5249"},
+ {file = "SQLAlchemy-2.0.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:00aa050faf24ce5f2af643e2b86822fa1d7149649995f11bc1e769bbfbf9010b"},
+ {file = "SQLAlchemy-2.0.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b52c6741073de5a744d27329f9803938dcad5c9fee7e61690c705f72973f4175"},
+ {file = "SQLAlchemy-2.0.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7db97eabd440327c35b751d5ebf78a107f505586485159bcc87660da8bb1fdca"},
+ {file = "SQLAlchemy-2.0.18-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:589aba9a35869695b319ed76c6f673d896cd01a7ff78054be1596df7ad9b096f"},
+ {file = "SQLAlchemy-2.0.18-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9da4ee8f711e077633730955c8f3cd2485c9abf5ea0f80aac23221a3224b9a8c"},
+ {file = "SQLAlchemy-2.0.18-cp310-cp310-win32.whl", hash = "sha256:5dd574a37be388512c72fe0d7318cb8e31743a9b2699847a025e0c08c5bf579d"},
+ {file = "SQLAlchemy-2.0.18-cp310-cp310-win_amd64.whl", hash = "sha256:6852cd34d96835e4c9091c1e6087325efb5b607b75fd9f7075616197d1c4688a"},
+ {file = "SQLAlchemy-2.0.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:10e001a84f820fea2640e4500e12322b03afc31d8f4f6b813b44813b2a7c7e0d"},
+ {file = "SQLAlchemy-2.0.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bffd6cd47c2e68970039c0d3e355c9ed761d3ca727b204e63cd294cad0e3df90"},
+ {file = "SQLAlchemy-2.0.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b7b3ebfa9416c8eafaffa65216e229480c495e305a06ba176dcac32710744e6"},
+ {file = "SQLAlchemy-2.0.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79228a7b90d95957354f37b9d46f2cc8926262ae17b0d3ed8f36c892f2a37e06"},
+ {file = "SQLAlchemy-2.0.18-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ba633b51835036ff0f402c21f3ff567c565a22ff0a5732b060a68f4660e2a38f"},
+ {file = "SQLAlchemy-2.0.18-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8da677135eff43502b7afab5a1e641edfb2dc734ba7fc146e9b1b86817a728e2"},
+ {file = "SQLAlchemy-2.0.18-cp311-cp311-win32.whl", hash = "sha256:82edf3a6090554a83942cec79151d6b5eb96e63d143e80e4cf6671e5d772f6be"},
+ {file = "SQLAlchemy-2.0.18-cp311-cp311-win_amd64.whl", hash = "sha256:69ae0e9509c43474e33152abe1385b8954922544616426bf793481e1a37e094f"},
+ {file = "SQLAlchemy-2.0.18-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:09397a18733fa2a4c7680b746094f980060666ee549deafdb5e102a99ce4619b"},
+ {file = "SQLAlchemy-2.0.18-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45b07470571bda5ee7f5ec471271bbde97267cc8403fce05e280c36ea73f4754"},
+ {file = "SQLAlchemy-2.0.18-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1aac42a21a7fa6c9665392c840b295962992ddf40aecf0a88073bc5c76728117"},
+ {file = "SQLAlchemy-2.0.18-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:da46beef0ce882546d92b7b2e8deb9e04dbb8fec72945a8eb28b347ca46bc15a"},
+ {file = "SQLAlchemy-2.0.18-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a6f1d8256d06f58e6ece150fbe05c63c7f9510df99ee8ac37423f5476a2cebb4"},
+ {file = "SQLAlchemy-2.0.18-cp37-cp37m-win32.whl", hash = "sha256:67fbb40db3985c0cfb942fe8853ad94a5e9702d2987dec03abadc2f3b6a24afb"},
+ {file = "SQLAlchemy-2.0.18-cp37-cp37m-win_amd64.whl", hash = "sha256:afb322ca05e2603deedbcd2e9910f11a3fd2f42bdeafe63018e5641945c7491c"},
+ {file = "SQLAlchemy-2.0.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:908c850b98cac1e203ababd4ba76868d19ae0d7172cdc75d3f1b7829b16837d2"},
+ {file = "SQLAlchemy-2.0.18-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10514adc41fc8f5922728fbac13d401a1aefcf037f009e64ca3b92464e33bf0e"},
+ {file = "SQLAlchemy-2.0.18-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b791577c546b6bbd7b43953565fcb0a2fec63643ad605353dd48afbc3c48317"},
+ {file = "SQLAlchemy-2.0.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:420bc6d06d4ae7fb6921524334689eebcbea7bf2005efef070a8562cc9527a37"},
+ {file = "SQLAlchemy-2.0.18-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ebdd2418ab4e2e26d572d9a1c03877f8514a9b7436729525aa571862507b3fea"},
+ {file = "SQLAlchemy-2.0.18-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:556dc18e39b6edb76239acfd1c010e37395a54c7fde8c57481c15819a3ffb13e"},
+ {file = "SQLAlchemy-2.0.18-cp38-cp38-win32.whl", hash = "sha256:7b8cba5a25e95041e3413d91f9e50616bcfaec95afa038ce7dc02efefe576745"},
+ {file = "SQLAlchemy-2.0.18-cp38-cp38-win_amd64.whl", hash = "sha256:0f7fdcce52cd882b559a57b484efc92e108efeeee89fab6b623aba1ac68aad2e"},
+ {file = "SQLAlchemy-2.0.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d7a2c1e711ce59ac9d0bba780318bcd102d2958bb423209f24c6354d8c4da930"},
+ {file = "SQLAlchemy-2.0.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5c95e3e7cc6285bf7ff263eabb0d3bfe3def9a1ff98124083d45e5ece72f4579"},
+ {file = "SQLAlchemy-2.0.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc44e50f9d5e96af1a561faa36863f9191f27364a4df3eb70bca66e9370480b6"},
+ {file = "SQLAlchemy-2.0.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfa1a0f83bdf8061db8d17c2029454722043f1e4dd1b3d3d3120d1b54e75825a"},
+ {file = "SQLAlchemy-2.0.18-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:194f2d5a7cb3739875c4d25b3fe288ab0b3dc33f7c857ba2845830c8c51170a0"},
+ {file = "SQLAlchemy-2.0.18-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ebc542d2289c0b016d6945fd07a7e2e23f4abc41e731ac8ad18a9e0c2fd0ec2"},
+ {file = "SQLAlchemy-2.0.18-cp39-cp39-win32.whl", hash = "sha256:774bd401e7993452ba0596e741c0c4d6d22f882dd2a798993859181dbffadc62"},
+ {file = "SQLAlchemy-2.0.18-cp39-cp39-win_amd64.whl", hash = "sha256:2756485f49e7df5c2208bdc64263d19d23eba70666f14ad12d6d8278a2fff65f"},
+ {file = "SQLAlchemy-2.0.18-py3-none-any.whl", hash = "sha256:6c5bae4c288bda92a7550fe8de9e068c0a7cd56b1c5d888aae5b40f0e13b40bd"},
+ {file = "SQLAlchemy-2.0.18.tar.gz", hash = "sha256:1fb792051db66e09c200e7bc3bda3b1eb18a5b8eb153d2cedb2b14b56a68b8cb"},
+]
+
+[package.dependencies]
+greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""}
+typing-extensions = ">=4.2.0"
+
+[package.extras]
+aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
+aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"]
+asyncio = ["greenlet (!=0.4.17)"]
+asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"]
+mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"]
+mssql = ["pyodbc"]
+mssql-pymssql = ["pymssql"]
+mssql-pyodbc = ["pyodbc"]
+mypy = ["mypy (>=0.910)"]
+mysql = ["mysqlclient (>=1.4.0)"]
+mysql-connector = ["mysql-connector-python"]
+oracle = ["cx-oracle (>=7)"]
+oracle-oracledb = ["oracledb (>=1.0.1)"]
+postgresql = ["psycopg2 (>=2.7)"]
+postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
+postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
+postgresql-psycopg = ["psycopg (>=3.0.7)"]
+postgresql-psycopg2binary = ["psycopg2-binary"]
+postgresql-psycopg2cffi = ["psycopg2cffi"]
+postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
+pymysql = ["pymysql"]
+sqlcipher = ["sqlcipher3-binary"]
+
+[[package]]
+name = "starlette"
+version = "0.27.0"
+description = "The little ASGI library that shines."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "starlette-0.27.0-py3-none-any.whl", hash = "sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91"},
+ {file = "starlette-0.27.0.tar.gz", hash = "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75"},
+]
+
+[package.dependencies]
+anyio = ">=3.4.0,<5"
+
+[package.extras]
+full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"]
+
+[[package]]
+name = "sympy"
+version = "1.12"
+description = "Computer algebra system (CAS) in Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "sympy-1.12-py3-none-any.whl", hash = "sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5"},
+ {file = "sympy-1.12.tar.gz", hash = "sha256:ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8"},
+]
+
+[package.dependencies]
+mpmath = ">=0.19"
+
+[[package]]
+name = "tabulate"
+version = "0.9.0"
+description = "Pretty-print tabular data"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"},
+ {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"},
+]
+
+[package.extras]
+widechars = ["wcwidth"]
+
+[[package]]
+name = "tenacity"
+version = "8.2.2"
+description = "Retry code until it succeeds"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "tenacity-8.2.2-py3-none-any.whl", hash = "sha256:2f277afb21b851637e8f52e6a613ff08734c347dc19ade928e519d7d2d8569b0"},
+ {file = "tenacity-8.2.2.tar.gz", hash = "sha256:43af037822bd0029025877f3b2d97cc4d7bb0c2991000a3d59d71517c5c969e0"},
+]
+
+[package.extras]
+doc = ["reno", "sphinx", "tornado (>=4.5)"]
+
+[[package]]
+name = "threadpoolctl"
+version = "3.2.0"
+description = "threadpoolctl"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "threadpoolctl-3.2.0-py3-none-any.whl", hash = "sha256:2b7818516e423bdaebb97c723f86a7c6b0a83d3f3b0970328d66f4d9104dc032"},
+ {file = "threadpoolctl-3.2.0.tar.gz", hash = "sha256:c96a0ba3bdddeaca37dc4cc7344aafad41cdb8c313f74fdfe387a867bba93355"},
+]
+
+[[package]]
+name = "tokenizers"
+version = "0.13.3"
+description = "Fast and Customizable Tokenizers"
+optional = false
+python-versions = "*"
+files = [
+ {file = "tokenizers-0.13.3-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:f3835c5be51de8c0a092058a4d4380cb9244fb34681fd0a295fbf0a52a5fdf33"},
+ {file = "tokenizers-0.13.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4ef4c3e821730f2692489e926b184321e887f34fb8a6b80b8096b966ba663d07"},
+ {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5fd1a6a25353e9aa762e2aae5a1e63883cad9f4e997c447ec39d071020459bc"},
+ {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee0b1b311d65beab83d7a41c56a1e46ab732a9eed4460648e8eb0bd69fc2d059"},
+ {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ef4215284df1277dadbcc5e17d4882bda19f770d02348e73523f7e7d8b8d396"},
+ {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4d53976079cff8a033f778fb9adca2d9d69d009c02fa2d71a878b5f3963ed30"},
+ {file = "tokenizers-0.13.3-cp310-cp310-win32.whl", hash = "sha256:1f0e3b4c2ea2cd13238ce43548959c118069db7579e5d40ec270ad77da5833ce"},
+ {file = "tokenizers-0.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:89649c00d0d7211e8186f7a75dfa1db6996f65edce4b84821817eadcc2d3c79e"},
+ {file = "tokenizers-0.13.3-cp311-cp311-macosx_10_11_universal2.whl", hash = "sha256:56b726e0d2bbc9243872b0144515ba684af5b8d8cd112fb83ee1365e26ec74c8"},
+ {file = "tokenizers-0.13.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:cc5c022ce692e1f499d745af293ab9ee6f5d92538ed2faf73f9708c89ee59ce6"},
+ {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f55c981ac44ba87c93e847c333e58c12abcbb377a0c2f2ef96e1a266e4184ff2"},
+ {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f247eae99800ef821a91f47c5280e9e9afaeed9980fc444208d5aa6ba69ff148"},
+ {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b3e3215d048e94f40f1c95802e45dcc37c5b05eb46280fc2ccc8cd351bff839"},
+ {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ba2b0bf01777c9b9bc94b53764d6684554ce98551fec496f71bc5be3a03e98b"},
+ {file = "tokenizers-0.13.3-cp311-cp311-win32.whl", hash = "sha256:cc78d77f597d1c458bf0ea7c2a64b6aa06941c7a99cb135b5969b0278824d808"},
+ {file = "tokenizers-0.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:ecf182bf59bd541a8876deccf0360f5ae60496fd50b58510048020751cf1724c"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:0527dc5436a1f6bf2c0327da3145687d3bcfbeab91fed8458920093de3901b44"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07cbb2c307627dc99b44b22ef05ff4473aa7c7cc1fec8f0a8b37d8a64b1a16d2"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4560dbdeaae5b7ee0d4e493027e3de6d53c991b5002d7ff95083c99e11dd5ac0"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64064bd0322405c9374305ab9b4c07152a1474370327499911937fd4a76d004b"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8c6e2ab0f2e3d939ca66aa1d596602105fe33b505cd2854a4c1717f704c51de"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-win32.whl", hash = "sha256:6cc29d410768f960db8677221e497226e545eaaea01aa3613fa0fdf2cc96cff4"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fc2a7fdf864554a0dacf09d32e17c0caa9afe72baf9dd7ddedc61973bae352d8"},
+ {file = "tokenizers-0.13.3-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:8791dedba834c1fc55e5f1521be325ea3dafb381964be20684b92fdac95d79b7"},
+ {file = "tokenizers-0.13.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:d607a6a13718aeb20507bdf2b96162ead5145bbbfa26788d6b833f98b31b26e1"},
+ {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3791338f809cd1bf8e4fee6b540b36822434d0c6c6bc47162448deee3f77d425"},
+ {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2f35f30e39e6aab8716f07790f646bdc6e4a853816cc49a95ef2a9016bf9ce6"},
+ {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310204dfed5aa797128b65d63538a9837cbdd15da2a29a77d67eefa489edda26"},
+ {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0f9b92ea052305166559f38498b3b0cae159caea712646648aaa272f7160963"},
+ {file = "tokenizers-0.13.3-cp38-cp38-win32.whl", hash = "sha256:9a3fa134896c3c1f0da6e762d15141fbff30d094067c8f1157b9fdca593b5806"},
+ {file = "tokenizers-0.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:8e7b0cdeace87fa9e760e6a605e0ae8fc14b7d72e9fc19c578116f7287bb873d"},
+ {file = "tokenizers-0.13.3-cp39-cp39-macosx_10_11_x86_64.whl", hash = "sha256:00cee1e0859d55507e693a48fa4aef07060c4bb6bd93d80120e18fea9371c66d"},
+ {file = "tokenizers-0.13.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:a23ff602d0797cea1d0506ce69b27523b07e70f6dda982ab8cf82402de839088"},
+ {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ce07445050b537d2696022dafb115307abdffd2a5c106f029490f84501ef97"},
+ {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:280ffe95f50eaaf655b3a1dc7ff1d9cf4777029dbbc3e63a74e65a056594abc3"},
+ {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97acfcec592f7e9de8cadcdcda50a7134423ac8455c0166b28c9ff04d227b371"},
+ {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd7730c98a3010cd4f523465867ff95cd9d6430db46676ce79358f65ae39797b"},
+ {file = "tokenizers-0.13.3-cp39-cp39-win32.whl", hash = "sha256:48625a108029cb1ddf42e17a81b5a3230ba6888a70c9dc14e81bc319e812652d"},
+ {file = "tokenizers-0.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:bc0a6f1ba036e482db6453571c9e3e60ecd5489980ffd95d11dc9f960483d783"},
+ {file = "tokenizers-0.13.3.tar.gz", hash = "sha256:2e546dbb68b623008a5442353137fbb0123d311a6d7ba52f2667c8862a75af2e"},
+]
+
+[package.extras]
+dev = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"]
+docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"]
+testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"]
+
+[[package]]
+name = "torch"
+version = "2.0.1"
+description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration"
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "torch-2.0.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:8ced00b3ba471856b993822508f77c98f48a458623596a4c43136158781e306a"},
+ {file = "torch-2.0.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:359bfaad94d1cda02ab775dc1cc386d585712329bb47b8741607ef6ef4950747"},
+ {file = "torch-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:7c84e44d9002182edd859f3400deaa7410f5ec948a519cc7ef512c2f9b34d2c4"},
+ {file = "torch-2.0.1-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:567f84d657edc5582d716900543e6e62353dbe275e61cdc36eda4929e46df9e7"},
+ {file = "torch-2.0.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:787b5a78aa7917465e9b96399b883920c88a08f4eb63b5a5d2d1a16e27d2f89b"},
+ {file = "torch-2.0.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:e617b1d0abaf6ced02dbb9486803abfef0d581609b09641b34fa315c9c40766d"},
+ {file = "torch-2.0.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b6019b1de4978e96daa21d6a3ebb41e88a0b474898fe251fd96189587408873e"},
+ {file = "torch-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:dbd68cbd1cd9da32fe5d294dd3411509b3d841baecb780b38b3b7b06c7754434"},
+ {file = "torch-2.0.1-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:ef654427d91600129864644e35deea761fb1fe131710180b952a6f2e2207075e"},
+ {file = "torch-2.0.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:25aa43ca80dcdf32f13da04c503ec7afdf8e77e3a0183dd85cd3e53b2842e527"},
+ {file = "torch-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5ef3ea3d25441d3957348f7e99c7824d33798258a2bf5f0f0277cbcadad2e20d"},
+ {file = "torch-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0882243755ff28895e8e6dc6bc26ebcf5aa0911ed81b2a12f241fc4b09075b13"},
+ {file = "torch-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:f66aa6b9580a22b04d0af54fcd042f52406a8479e2b6a550e3d9f95963e168c8"},
+ {file = "torch-2.0.1-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:1adb60d369f2650cac8e9a95b1d5758e25d526a34808f7448d0bd599e4ae9072"},
+ {file = "torch-2.0.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:1bcffc16b89e296826b33b98db5166f990e3b72654a2b90673e817b16c50e32b"},
+ {file = "torch-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:e10e1597f2175365285db1b24019eb6f04d53dcd626c735fc502f1e8b6be9875"},
+ {file = "torch-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:423e0ae257b756bb45a4b49072046772d1ad0c592265c5080070e0767da4e490"},
+ {file = "torch-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:8742bdc62946c93f75ff92da00e3803216c6cce9b132fbca69664ca38cfb3e18"},
+ {file = "torch-2.0.1-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:c62df99352bd6ee5a5a8d1832452110435d178b5164de450831a3a8cc14dc680"},
+ {file = "torch-2.0.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:671a2565e3f63b8fe8e42ae3e36ad249fe5e567435ea27b94edaa672a7d0c416"},
+]
+
+[package.dependencies]
+filelock = "*"
+jinja2 = "*"
+networkx = "*"
+sympy = "*"
+typing-extensions = "*"
+
+[package.extras]
+opt-einsum = ["opt-einsum (>=3.3)"]
+
+[[package]]
+name = "torchvision"
+version = "0.15.2"
+description = "image and video datasets and models for torch deep learning"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "torchvision-0.15.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7754088774e810c5672b142a45dcf20b1bd986a5a7da90f8660c43dc43fb850c"},
+ {file = "torchvision-0.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37eb138e13f6212537a3009ac218695483a635c404b6cc1d8e0d0d978026a86d"},
+ {file = "torchvision-0.15.2-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:54143f7cc0797d199b98a53b7d21c3f97615762d4dd17ad45a41c7e80d880e73"},
+ {file = "torchvision-0.15.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:1eefebf5fbd01a95fe8f003d623d941601c94b5cec547b420da89cb369d9cf96"},
+ {file = "torchvision-0.15.2-cp310-cp310-win_amd64.whl", hash = "sha256:96fae30c5ca8423f4b9790df0f0d929748e32718d88709b7b567d2f630c042e3"},
+ {file = "torchvision-0.15.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5f35f6bd5bcc4568e6522e4137fa60fcc72f4fa3e615321c26cd87e855acd398"},
+ {file = "torchvision-0.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:757505a0ab2be7096cb9d2bf4723202c971cceddb72c7952a7e877f773de0f8a"},
+ {file = "torchvision-0.15.2-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:012ad25cfd9019ff9b0714a168727e3845029be1af82296ff1e1482931fa4b80"},
+ {file = "torchvision-0.15.2-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b02a7ffeaa61448737f39a4210b8ee60234bda0515a0c0d8562f884454105b0f"},
+ {file = "torchvision-0.15.2-cp311-cp311-win_amd64.whl", hash = "sha256:10be76ceded48329d0a0355ac33da131ee3993ff6c125e4a02ab34b5baa2472c"},
+ {file = "torchvision-0.15.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f12415b686dba884fb086f53ac803f692be5a5cdd8a758f50812b30fffea2e4"},
+ {file = "torchvision-0.15.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:31211c01f8b8ec33b8a638327b5463212e79a03e43c895f88049f97af1bd12fd"},
+ {file = "torchvision-0.15.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:c55f9889e436f14b4f84a9c00ebad0d31f5b4626f10cf8018e6c676f92a6d199"},
+ {file = "torchvision-0.15.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:9a192f2aa979438f23c20e883980b23d13268ab9f819498774a6d2eb021802c2"},
+ {file = "torchvision-0.15.2-cp38-cp38-win_amd64.whl", hash = "sha256:c07071bc8d02aa8fcdfe139ab6a1ef57d3b64c9e30e84d12d45c9f4d89fb6536"},
+ {file = "torchvision-0.15.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4790260fcf478a41c7ecc60a6d5200a88159fdd8d756e9f29f0f8c59c4a67a68"},
+ {file = "torchvision-0.15.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:987ab62225b4151a11e53fd06150c5258ced24ac9d7c547e0e4ab6fbca92a5ce"},
+ {file = "torchvision-0.15.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:63df26673e66cba3f17e07c327a8cafa3cce98265dbc3da329f1951d45966838"},
+ {file = "torchvision-0.15.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b85f98d4cc2f72452f6792ab4463a3541bc5678a8cdd3da0e139ba2fe8b56d42"},
+ {file = "torchvision-0.15.2-cp39-cp39-win_amd64.whl", hash = "sha256:07c462524cc1bba5190c16a9d47eac1fca024d60595a310f23c00b4ffff18b30"},
+]
+
+[package.dependencies]
+numpy = "*"
+pillow = ">=5.3.0,<8.3.dev0 || >=8.4.dev0"
+requests = "*"
+torch = "2.0.1"
+
+[package.extras]
+scipy = ["scipy"]
+
+[[package]]
+name = "tqdm"
+version = "4.65.0"
+description = "Fast, Extensible Progress Meter"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tqdm-4.65.0-py3-none-any.whl", hash = "sha256:c4f53a17fe37e132815abceec022631be8ffe1b9381c2e6e30aa70edc99e9671"},
+ {file = "tqdm-4.65.0.tar.gz", hash = "sha256:1871fb68a86b8fb3b59ca4cdd3dcccbc7e6d613eeed31f4c332531977b89beb5"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+dev = ["py-make (>=0.1.0)", "twine", "wheel"]
+notebook = ["ipywidgets (>=6)"]
+slack = ["slack-sdk"]
+telegram = ["requests"]
+
+[[package]]
+name = "transformers"
+version = "4.31.0"
+description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow"
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "transformers-4.31.0-py3-none-any.whl", hash = "sha256:8487aab0195ce1c2a5ae189305118b9720daddbc7b688edb09ccd79e3b149f6b"},
+ {file = "transformers-4.31.0.tar.gz", hash = "sha256:4302fba920a1c24d3a429a29efff6a63eac03f3f3cf55b55927fc795d01cb273"},
+]
+
+[package.dependencies]
+filelock = "*"
+huggingface-hub = ">=0.14.1,<1.0"
+numpy = ">=1.17"
+packaging = ">=20.0"
+pyyaml = ">=5.1"
+regex = "!=2019.12.17"
+requests = "*"
+safetensors = ">=0.3.1"
+tokenizers = ">=0.11.1,<0.11.3 || >0.11.3,<0.14"
+tqdm = ">=4.27"
+
+[package.extras]
+accelerate = ["accelerate (>=0.20.3)"]
+agents = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.9,!=1.12.0)"]
+all = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.2.8,!=0.3.2,<=0.4.13)", "jaxlib (>=0.1.65,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"]
+audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"]
+codecarbon = ["codecarbon (==1.2.0)"]
+deepspeed = ["accelerate (>=0.20.3)", "deepspeed (>=0.9.3)"]
+deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.20.3)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "timeout-decorator"]
+dev = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.4.13)", "jaxlib (>=0.1.65,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"]
+dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "urllib3 (<2.0.0)"]
+dev-torch = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "accelerate (>=0.20.3)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"]
+docs = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.4.13)", "jaxlib (>=0.1.65,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"]
+docs-specific = ["hf-doc-builder"]
+fairscale = ["fairscale (>0.3)"]
+flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.2.8,!=0.3.2,<=0.4.13)", "jaxlib (>=0.1.65,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)"]
+flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"]
+ftfy = ["ftfy"]
+integrations = ["optuna", "ray[tune]", "sigopt"]
+ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"]
+modelcreation = ["cookiecutter (==1.7.3)"]
+natten = ["natten (>=0.14.6)"]
+onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"]
+onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"]
+optuna = ["optuna"]
+quality = ["GitPython (<3.1.19)", "black (>=23.1,<24.0)", "datasets (!=2.5.0)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (>=0.0.241,<=0.0.259)", "urllib3 (<2.0.0)"]
+ray = ["ray[tune]"]
+retrieval = ["datasets (!=2.5.0)", "faiss-cpu"]
+sagemaker = ["sagemaker (>=2.31.0)"]
+sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"]
+serving = ["fastapi", "pydantic (<2)", "starlette", "uvicorn"]
+sigopt = ["sigopt"]
+sklearn = ["scikit-learn"]
+speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"]
+testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf", "psutil", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "timeout-decorator"]
+tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx"]
+tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx"]
+tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"]
+timm = ["timm"]
+tokenizers = ["tokenizers (>=0.11.1,!=0.11.3,<0.14)"]
+torch = ["accelerate (>=0.20.3)", "torch (>=1.9,!=1.12.0)"]
+torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"]
+torch-vision = ["Pillow (<10.0.0)", "torchvision"]
+torchhub = ["filelock", "huggingface-hub (>=0.14.1,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "tqdm (>=4.27)"]
+video = ["av (==9.2.0)", "decord (==0.6.0)"]
+vision = ["Pillow (<10.0.0)"]
+
+[[package]]
+name = "typing-extensions"
+version = "4.7.1"
+description = "Backported and Experimental Type Hints for Python 3.7+"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"},
+ {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"},
+]
+
+[[package]]
+name = "typing-inspect"
+version = "0.9.0"
+description = "Runtime inspection utilities for typing module."
+optional = false
+python-versions = "*"
+files = [
+ {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"},
+ {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"},
+]
+
+[package.dependencies]
+mypy-extensions = ">=0.3.0"
+typing-extensions = ">=3.7.4"
+
+[[package]]
+name = "tzdata"
+version = "2023.3"
+description = "Provider of IANA time zone data"
+optional = false
+python-versions = ">=2"
+files = [
+ {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"},
+ {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"},
+]
+
+[[package]]
+name = "tzlocal"
+version = "5.0.1"
+description = "tzinfo object for the local timezone"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tzlocal-5.0.1-py3-none-any.whl", hash = "sha256:f3596e180296aaf2dbd97d124fe76ae3a0e3d32b258447de7b939b3fd4be992f"},
+ {file = "tzlocal-5.0.1.tar.gz", hash = "sha256:46eb99ad4bdb71f3f72b7d24f4267753e240944ecfc16f25d2719ba89827a803"},
+]
+
+[package.dependencies]
+tzdata = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+devenv = ["black", "check-manifest", "flake8", "pyroma", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"]
+
+[[package]]
+name = "unstructured"
+version = "0.8.0"
+description = "A library that prepares raw documents for downstream ML tasks."
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "unstructured-0.8.0-py3-none-any.whl", hash = "sha256:d6f574327f6b371f8bbe8d2c6861d8b40114ead9b920054c3601449ec72c3e42"},
+ {file = "unstructured-0.8.0.tar.gz", hash = "sha256:528b9140ef56bee3f7eabd23e7203f3f6890c1a72188d640985f86a3f842c565"},
+]
+
+[package.dependencies]
+argilla = "*"
+chardet = "*"
+filetype = "*"
+lxml = "*"
+markdown = "*"
+msg-parser = "*"
+nltk = "*"
+openpyxl = "*"
+pandas = "*"
+pdf2image = "*"
+"pdfminer.six" = "*"
+pillow = "*"
+pypandoc = "*"
+python-docx = "*"
+python-magic = "*"
+python-pptx = "*"
+requests = "*"
+tabulate = "*"
+xlrd = "*"
+
+[package.extras]
+azure = ["adlfs", "fsspec"]
+discord = ["discord-py"]
+dropbox = ["dropboxdrivefs", "fsspec"]
+elasticsearch = ["elasticsearch", "jq"]
+gcs = ["fsspec", "gcsfs"]
+github = ["pygithub (==1.58.2)"]
+gitlab = ["python-gitlab"]
+google-drive = ["google-api-python-client"]
+huggingface = ["langdetect", "sacremoses", "sentencepiece", "torch", "transformers"]
+local-inference = ["unstructured-inference (==0.5.5)"]
+reddit = ["praw"]
+s3 = ["fsspec", "s3fs"]
+slack = ["slack-sdk"]
+wikipedia = ["wikipedia"]
+
+[[package]]
+name = "urllib3"
+version = "2.0.3"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"},
+ {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "uvicorn"
+version = "0.22.0"
+description = "The lightning-fast ASGI server."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "uvicorn-0.22.0-py3-none-any.whl", hash = "sha256:e9434d3bbf05f310e762147f769c9f21235ee118ba2d2bf1155a7196448bd996"},
+ {file = "uvicorn-0.22.0.tar.gz", hash = "sha256:79277ae03db57ce7d9aa0567830bbb51d7a612f54d6e1e3e92da3ef24c2c8ed8"},
+]
+
+[package.dependencies]
+click = ">=7.0"
+colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""}
+h11 = ">=0.8"
+httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""}
+python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
+pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""}
+uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""}
+watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
+websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""}
+
+[package.extras]
+standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]
+
+[[package]]
+name = "uvloop"
+version = "0.17.0"
+description = "Fast implementation of asyncio event loop on top of libuv"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "uvloop-0.17.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce9f61938d7155f79d3cb2ffa663147d4a76d16e08f65e2c66b77bd41b356718"},
+ {file = "uvloop-0.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:68532f4349fd3900b839f588972b3392ee56042e440dd5873dfbbcd2cc67617c"},
+ {file = "uvloop-0.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0949caf774b9fcefc7c5756bacbbbd3fc4c05a6b7eebc7c7ad6f825b23998d6d"},
+ {file = "uvloop-0.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff3d00b70ce95adce264462c930fbaecb29718ba6563db354608f37e49e09024"},
+ {file = "uvloop-0.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a5abddb3558d3f0a78949c750644a67be31e47936042d4f6c888dd6f3c95f4aa"},
+ {file = "uvloop-0.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8efcadc5a0003d3a6e887ccc1fb44dec25594f117a94e3127954c05cf144d811"},
+ {file = "uvloop-0.17.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3378eb62c63bf336ae2070599e49089005771cc651c8769aaad72d1bd9385a7c"},
+ {file = "uvloop-0.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6aafa5a78b9e62493539456f8b646f85abc7093dd997f4976bb105537cf2635e"},
+ {file = "uvloop-0.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c686a47d57ca910a2572fddfe9912819880b8765e2f01dc0dd12a9bf8573e539"},
+ {file = "uvloop-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:864e1197139d651a76c81757db5eb199db8866e13acb0dfe96e6fc5d1cf45fc4"},
+ {file = "uvloop-0.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2a6149e1defac0faf505406259561bc14b034cdf1d4711a3ddcdfbaa8d825a05"},
+ {file = "uvloop-0.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6708f30db9117f115eadc4f125c2a10c1a50d711461699a0cbfaa45b9a78e376"},
+ {file = "uvloop-0.17.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:23609ca361a7fc587031429fa25ad2ed7242941adec948f9d10c045bfecab06b"},
+ {file = "uvloop-0.17.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2deae0b0fb00a6af41fe60a675cec079615b01d68beb4cc7b722424406b126a8"},
+ {file = "uvloop-0.17.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45cea33b208971e87a31c17622e4b440cac231766ec11e5d22c76fab3bf9df62"},
+ {file = "uvloop-0.17.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9b09e0f0ac29eee0451d71798878eae5a4e6a91aa275e114037b27f7db72702d"},
+ {file = "uvloop-0.17.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dbbaf9da2ee98ee2531e0c780455f2841e4675ff580ecf93fe5c48fe733b5667"},
+ {file = "uvloop-0.17.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a4aee22ece20958888eedbad20e4dbb03c37533e010fb824161b4f05e641f738"},
+ {file = "uvloop-0.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:307958f9fc5c8bb01fad752d1345168c0abc5d62c1b72a4a8c6c06f042b45b20"},
+ {file = "uvloop-0.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ebeeec6a6641d0adb2ea71dcfb76017602ee2bfd8213e3fcc18d8f699c5104f"},
+ {file = "uvloop-0.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1436c8673c1563422213ac6907789ecb2b070f5939b9cbff9ef7113f2b531595"},
+ {file = "uvloop-0.17.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8887d675a64cfc59f4ecd34382e5b4f0ef4ae1da37ed665adba0c2badf0d6578"},
+ {file = "uvloop-0.17.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3db8de10ed684995a7f34a001f15b374c230f7655ae840964d51496e2f8a8474"},
+ {file = "uvloop-0.17.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7d37dccc7ae63e61f7b96ee2e19c40f153ba6ce730d8ba4d3b4e9738c1dccc1b"},
+ {file = "uvloop-0.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cbbe908fda687e39afd6ea2a2f14c2c3e43f2ca88e3a11964b297822358d0e6c"},
+ {file = "uvloop-0.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d97672dc709fa4447ab83276f344a165075fd9f366a97b712bdd3fee05efae8"},
+ {file = "uvloop-0.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1e507c9ee39c61bfddd79714e4f85900656db1aec4d40c6de55648e85c2799c"},
+ {file = "uvloop-0.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c092a2c1e736086d59ac8e41f9c98f26bbf9b9222a76f21af9dfe949b99b2eb9"},
+ {file = "uvloop-0.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:30babd84706115626ea78ea5dbc7dd8d0d01a2e9f9b306d24ca4ed5796c66ded"},
+ {file = "uvloop-0.17.0.tar.gz", hash = "sha256:0ddf6baf9cf11a1a22c71487f39f15b2cf78eb5bde7e5b45fbb99e8a9d91b9e1"},
+]
+
+[package.extras]
+dev = ["Cython (>=0.29.32,<0.30.0)", "Sphinx (>=4.1.2,<4.2.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=22.0.0,<22.1.0)", "pycodestyle (>=2.7.0,<2.8.0)", "pytest (>=3.6.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
+docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
+test = ["Cython (>=0.29.32,<0.30.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=22.0.0,<22.1.0)", "pycodestyle (>=2.7.0,<2.8.0)"]
+
+[[package]]
+name = "watchfiles"
+version = "0.19.0"
+description = "Simple, modern and high performance file watching and code reload in python."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "watchfiles-0.19.0-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:91633e64712df3051ca454ca7d1b976baf842d7a3640b87622b323c55f3345e7"},
+ {file = "watchfiles-0.19.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:b6577b8c6c8701ba8642ea9335a129836347894b666dd1ec2226830e263909d3"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:18b28f6ad871b82df9542ff958d0c86bb0d8310bb09eb8e87d97318a3b5273af"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fac19dc9cbc34052394dbe81e149411a62e71999c0a19e1e09ce537867f95ae0"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:09ea3397aecbc81c19ed7f025e051a7387feefdb789cf768ff994c1228182fda"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0376deac92377817e4fb8f347bf559b7d44ff556d9bc6f6208dd3f79f104aaf"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c75eff897786ee262c9f17a48886f4e98e6cfd335e011c591c305e5d083c056"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb5d45c4143c1dd60f98a16187fd123eda7248f84ef22244818c18d531a249d1"},
+ {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:79c533ff593db861ae23436541f481ec896ee3da4e5db8962429b441bbaae16e"},
+ {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3d7d267d27aceeeaa3de0dd161a0d64f0a282264d592e335fff7958cc0cbae7c"},
+ {file = "watchfiles-0.19.0-cp37-abi3-win32.whl", hash = "sha256:176a9a7641ec2c97b24455135d58012a5be5c6217fc4d5fef0b2b9f75dbf5154"},
+ {file = "watchfiles-0.19.0-cp37-abi3-win_amd64.whl", hash = "sha256:945be0baa3e2440151eb3718fd8846751e8b51d8de7b884c90b17d271d34cae8"},
+ {file = "watchfiles-0.19.0-cp37-abi3-win_arm64.whl", hash = "sha256:0089c6dc24d436b373c3c57657bf4f9a453b13767150d17284fc6162b2791911"},
+ {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cae3dde0b4b2078f31527acff6f486e23abed307ba4d3932466ba7cdd5ecec79"},
+ {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f3920b1285a7d3ce898e303d84791b7bf40d57b7695ad549dc04e6a44c9f120"},
+ {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9afd0d69429172c796164fd7fe8e821ade9be983f51c659a38da3faaaaac44dc"},
+ {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68dce92b29575dda0f8d30c11742a8e2b9b8ec768ae414b54f7453f27bdf9545"},
+ {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5569fc7f967429d4bc87e355cdfdcee6aabe4b620801e2cf5805ea245c06097c"},
+ {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5471582658ea56fca122c0f0d0116a36807c63fefd6fdc92c71ca9a4491b6b48"},
+ {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b538014a87f94d92f98f34d3e6d2635478e6be6423a9ea53e4dd96210065e193"},
+ {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20b44221764955b1e703f012c74015306fb7e79a00c15370785f309b1ed9aa8d"},
+ {file = "watchfiles-0.19.0.tar.gz", hash = "sha256:d9b073073e048081e502b6c6b0b88714c026a1a4c890569238d04aca5f9ca74b"},
+]
+
+[package.dependencies]
+anyio = ">=3.0.0"
+
+[[package]]
+name = "websockets"
+version = "11.0.3"
+description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "websockets-11.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3ccc8a0c387629aec40f2fc9fdcb4b9d5431954f934da3eaf16cdc94f67dbfac"},
+ {file = "websockets-11.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d67ac60a307f760c6e65dad586f556dde58e683fab03323221a4e530ead6f74d"},
+ {file = "websockets-11.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84d27a4832cc1a0ee07cdcf2b0629a8a72db73f4cf6de6f0904f6661227f256f"},
+ {file = "websockets-11.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffd7dcaf744f25f82190856bc26ed81721508fc5cbf2a330751e135ff1283564"},
+ {file = "websockets-11.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7622a89d696fc87af8e8d280d9b421db5133ef5b29d3f7a1ce9f1a7bf7fcfa11"},
+ {file = "websockets-11.0.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bceab846bac555aff6427d060f2fcfff71042dba6f5fca7dc4f75cac815e57ca"},
+ {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:54c6e5b3d3a8936a4ab6870d46bdd6ec500ad62bde9e44462c32d18f1e9a8e54"},
+ {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:41f696ba95cd92dc047e46b41b26dd24518384749ed0d99bea0a941ca87404c4"},
+ {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:86d2a77fd490ae3ff6fae1c6ceaecad063d3cc2320b44377efdde79880e11526"},
+ {file = "websockets-11.0.3-cp310-cp310-win32.whl", hash = "sha256:2d903ad4419f5b472de90cd2d40384573b25da71e33519a67797de17ef849b69"},
+ {file = "websockets-11.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:1d2256283fa4b7f4c7d7d3e84dc2ece74d341bce57d5b9bf385df109c2a1a82f"},
+ {file = "websockets-11.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e848f46a58b9fcf3d06061d17be388caf70ea5b8cc3466251963c8345e13f7eb"},
+ {file = "websockets-11.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa5003845cdd21ac0dc6c9bf661c5beddd01116f6eb9eb3c8e272353d45b3288"},
+ {file = "websockets-11.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b58cbf0697721120866820b89f93659abc31c1e876bf20d0b3d03cef14faf84d"},
+ {file = "websockets-11.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:660e2d9068d2bedc0912af508f30bbeb505bbbf9774d98def45f68278cea20d3"},
+ {file = "websockets-11.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1f0524f203e3bd35149f12157438f406eff2e4fb30f71221c8a5eceb3617b6b"},
+ {file = "websockets-11.0.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:def07915168ac8f7853812cc593c71185a16216e9e4fa886358a17ed0fd9fcf6"},
+ {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b30c6590146e53149f04e85a6e4fcae068df4289e31e4aee1fdf56a0dead8f97"},
+ {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:619d9f06372b3a42bc29d0cd0354c9bb9fb39c2cbc1a9c5025b4538738dbffaf"},
+ {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:01f5567d9cf6f502d655151645d4e8b72b453413d3819d2b6f1185abc23e82dd"},
+ {file = "websockets-11.0.3-cp311-cp311-win32.whl", hash = "sha256:e1459677e5d12be8bbc7584c35b992eea142911a6236a3278b9b5ce3326f282c"},
+ {file = "websockets-11.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:e7837cb169eca3b3ae94cc5787c4fed99eef74c0ab9506756eea335e0d6f3ed8"},
+ {file = "websockets-11.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f59a3c656fef341a99e3d63189852be7084c0e54b75734cde571182c087b152"},
+ {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2529338a6ff0eb0b50c7be33dc3d0e456381157a31eefc561771ee431134a97f"},
+ {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34fd59a4ac42dff6d4681d8843217137f6bc85ed29722f2f7222bd619d15e95b"},
+ {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:332d126167ddddec94597c2365537baf9ff62dfcc9db4266f263d455f2f031cb"},
+ {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6505c1b31274723ccaf5f515c1824a4ad2f0d191cec942666b3d0f3aa4cb4007"},
+ {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f467ba0050b7de85016b43f5a22b46383ef004c4f672148a8abf32bc999a87f0"},
+ {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9d9acd80072abcc98bd2c86c3c9cd4ac2347b5a5a0cae7ed5c0ee5675f86d9af"},
+ {file = "websockets-11.0.3-cp37-cp37m-win32.whl", hash = "sha256:e590228200fcfc7e9109509e4d9125eace2042fd52b595dd22bbc34bb282307f"},
+ {file = "websockets-11.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b16fff62b45eccb9c7abb18e60e7e446998093cdcb50fed33134b9b6878836de"},
+ {file = "websockets-11.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fb06eea71a00a7af0ae6aefbb932fb8a7df3cb390cc217d51a9ad7343de1b8d0"},
+ {file = "websockets-11.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8a34e13a62a59c871064dfd8ffb150867e54291e46d4a7cf11d02c94a5275bae"},
+ {file = "websockets-11.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4841ed00f1026dfbced6fca7d963c4e7043aa832648671b5138008dc5a8f6d99"},
+ {file = "websockets-11.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a073fc9ab1c8aff37c99f11f1641e16da517770e31a37265d2755282a5d28aa"},
+ {file = "websockets-11.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68b977f21ce443d6d378dbd5ca38621755f2063d6fdb3335bda981d552cfff86"},
+ {file = "websockets-11.0.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1a99a7a71631f0efe727c10edfba09ea6bee4166a6f9c19aafb6c0b5917d09c"},
+ {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bee9fcb41db2a23bed96c6b6ead6489702c12334ea20a297aa095ce6d31370d0"},
+ {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4b253869ea05a5a073ebfdcb5cb3b0266a57c3764cf6fe114e4cd90f4bfa5f5e"},
+ {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1553cb82942b2a74dd9b15a018dce645d4e68674de2ca31ff13ebc2d9f283788"},
+ {file = "websockets-11.0.3-cp38-cp38-win32.whl", hash = "sha256:f61bdb1df43dc9c131791fbc2355535f9024b9a04398d3bd0684fc16ab07df74"},
+ {file = "websockets-11.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:03aae4edc0b1c68498f41a6772d80ac7c1e33c06c6ffa2ac1c27a07653e79d6f"},
+ {file = "websockets-11.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:777354ee16f02f643a4c7f2b3eff8027a33c9861edc691a2003531f5da4f6bc8"},
+ {file = "websockets-11.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8c82f11964f010053e13daafdc7154ce7385ecc538989a354ccc7067fd7028fd"},
+ {file = "websockets-11.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3580dd9c1ad0701169e4d6fc41e878ffe05e6bdcaf3c412f9d559389d0c9e016"},
+ {file = "websockets-11.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f1a3f10f836fab6ca6efa97bb952300b20ae56b409414ca85bff2ad241d2a61"},
+ {file = "websockets-11.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df41b9bc27c2c25b486bae7cf42fccdc52ff181c8c387bfd026624a491c2671b"},
+ {file = "websockets-11.0.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:279e5de4671e79a9ac877427f4ac4ce93751b8823f276b681d04b2156713b9dd"},
+ {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1fdf26fa8a6a592f8f9235285b8affa72748dc12e964a5518c6c5e8f916716f7"},
+ {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:69269f3a0b472e91125b503d3c0b3566bda26da0a3261c49f0027eb6075086d1"},
+ {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:97b52894d948d2f6ea480171a27122d77af14ced35f62e5c892ca2fae9344311"},
+ {file = "websockets-11.0.3-cp39-cp39-win32.whl", hash = "sha256:c7f3cb904cce8e1be667c7e6fef4516b98d1a6a0635a58a57528d577ac18a128"},
+ {file = "websockets-11.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c792ea4eabc0159535608fc5658a74d1a81020eb35195dd63214dcf07556f67e"},
+ {file = "websockets-11.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f2e58f2c36cc52d41f2659e4c0cbf7353e28c8c9e63e30d8c6d3494dc9fdedcf"},
+ {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de36fe9c02995c7e6ae6efe2e205816f5f00c22fd1fbf343d4d18c3d5ceac2f5"},
+ {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ac56b661e60edd453585f4bd68eb6a29ae25b5184fd5ba51e97652580458998"},
+ {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e052b8467dd07d4943936009f46ae5ce7b908ddcac3fda581656b1b19c083d9b"},
+ {file = "websockets-11.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:42cc5452a54a8e46a032521d7365da775823e21bfba2895fb7b77633cce031bb"},
+ {file = "websockets-11.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e6316827e3e79b7b8e7d8e3b08f4e331af91a48e794d5d8b099928b6f0b85f20"},
+ {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8531fdcad636d82c517b26a448dcfe62f720e1922b33c81ce695d0edb91eb931"},
+ {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c114e8da9b475739dde229fd3bc6b05a6537a88a578358bc8eb29b4030fac9c9"},
+ {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e063b1865974611313a3849d43f2c3f5368093691349cf3c7c8f8f75ad7cb280"},
+ {file = "websockets-11.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:92b2065d642bf8c0a82d59e59053dd2fdde64d4ed44efe4870fa816c1232647b"},
+ {file = "websockets-11.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0ee68fe502f9031f19d495dae2c268830df2760c0524cbac5d759921ba8c8e82"},
+ {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcacf2c7a6c3a84e720d1bb2b543c675bf6c40e460300b628bab1b1efc7c034c"},
+ {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b67c6f5e5a401fc56394f191f00f9b3811fe843ee93f4a70df3c389d1adf857d"},
+ {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d5023a4b6a5b183dc838808087033ec5df77580485fc533e7dab2567851b0a4"},
+ {file = "websockets-11.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ed058398f55163a79bb9f06a90ef9ccc063b204bb346c4de78efc5d15abfe602"},
+ {file = "websockets-11.0.3-py3-none-any.whl", hash = "sha256:6681ba9e7f8f3b19440921e99efbb40fc89f26cd71bf539e45d8c8a25c976dc6"},
+ {file = "websockets-11.0.3.tar.gz", hash = "sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016"},
+]
+
+[[package]]
+name = "win-unicode-console"
+version = "0.5"
+description = "Enable Unicode input and display when running Python from Windows console."
+optional = false
+python-versions = "*"
+files = [
+ {file = "win_unicode_console-0.5.zip", hash = "sha256:d4142d4d56d46f449d6f00536a73625a871cba040f0bc1a2e305a04578f07d1e"},
+]
+
+[[package]]
+name = "xlrd"
+version = "2.0.1"
+description = "Library for developers to extract data from Microsoft Excel (tm) .xls spreadsheet files"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+files = [
+ {file = "xlrd-2.0.1-py2.py3-none-any.whl", hash = "sha256:6a33ee89877bd9abc1158129f6e94be74e2679636b8a205b43b85206c3f0bbdd"},
+ {file = "xlrd-2.0.1.tar.gz", hash = "sha256:f72f148f54442c6b056bf931dbc34f986fd0c3b0b6b5a58d013c9aef274d0c88"},
+]
+
+[package.extras]
+build = ["twine", "wheel"]
+docs = ["sphinx"]
+test = ["pytest", "pytest-cov"]
+
+[[package]]
+name = "xlsxwriter"
+version = "3.1.2"
+description = "A Python module for creating Excel XLSX files."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "XlsxWriter-3.1.2-py3-none-any.whl", hash = "sha256:331508ff39d610ecdaf979e458840bc1eab6e6a02cfd5d08f044f0f73636236f"},
+ {file = "XlsxWriter-3.1.2.tar.gz", hash = "sha256:78751099a770273f1c98b8d6643351f68f98ae8e6acf9d09d37dc6798f8cd3de"},
+]
+
+[[package]]
+name = "yarl"
+version = "1.9.2"
+description = "Yet another URL library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"},
+ {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"},
+ {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"},
+ {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"},
+ {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"},
+ {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"},
+ {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"},
+ {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"},
+ {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"},
+ {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"},
+ {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"},
+ {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"},
+ {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"},
+ {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"},
+ {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"},
+]
+
+[package.dependencies]
+idna = ">=2.0"
+multidict = ">=4.0"
+
+[[package]]
+name = "zipp"
+version = "3.16.0"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "zipp-3.16.0-py3-none-any.whl", hash = "sha256:5dadc3ad0a1f825fe42ce1bce0f2fc5a13af2e6b2d386af5b0ff295bc0a287d3"},
+ {file = "zipp-3.16.0.tar.gz", hash = "sha256:1876cb065531855bbe83b6c489dcf69ecc28f1068d8e95959fe8bbc77774c941"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
+
+[[package]]
+name = "zstandard"
+version = "0.21.0"
+description = "Zstandard bindings for Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "zstandard-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:649a67643257e3b2cff1c0a73130609679a5673bf389564bc6d4b164d822a7ce"},
+ {file = "zstandard-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:144a4fe4be2e747bf9c646deab212666e39048faa4372abb6a250dab0f347a29"},
+ {file = "zstandard-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b72060402524ab91e075881f6b6b3f37ab715663313030d0ce983da44960a86f"},
+ {file = "zstandard-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8257752b97134477fb4e413529edaa04fc0457361d304c1319573de00ba796b1"},
+ {file = "zstandard-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c053b7c4cbf71cc26808ed67ae955836232f7638444d709bfc302d3e499364fa"},
+ {file = "zstandard-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2769730c13638e08b7a983b32cb67775650024632cd0476bf1ba0e6360f5ac7d"},
+ {file = "zstandard-0.21.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7d3bc4de588b987f3934ca79140e226785d7b5e47e31756761e48644a45a6766"},
+ {file = "zstandard-0.21.0-cp310-cp310-win32.whl", hash = "sha256:67829fdb82e7393ca68e543894cd0581a79243cc4ec74a836c305c70a5943f07"},
+ {file = "zstandard-0.21.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6048a287f8d2d6e8bc67f6b42a766c61923641dd4022b7fd3f7439e17ba5a4d"},
+ {file = "zstandard-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7f2afab2c727b6a3d466faee6974a7dad0d9991241c498e7317e5ccf53dbc766"},
+ {file = "zstandard-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff0852da2abe86326b20abae912d0367878dd0854b8931897d44cfeb18985472"},
+ {file = "zstandard-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d12fa383e315b62630bd407477d750ec96a0f438447d0e6e496ab67b8b451d39"},
+ {file = "zstandard-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1b9703fe2e6b6811886c44052647df7c37478af1b4a1a9078585806f42e5b15"},
+ {file = "zstandard-0.21.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df28aa5c241f59a7ab524f8ad8bb75d9a23f7ed9d501b0fed6d40ec3064784e8"},
+ {file = "zstandard-0.21.0-cp311-cp311-win32.whl", hash = "sha256:0aad6090ac164a9d237d096c8af241b8dcd015524ac6dbec1330092dba151657"},
+ {file = "zstandard-0.21.0-cp311-cp311-win_amd64.whl", hash = "sha256:48b6233b5c4cacb7afb0ee6b4f91820afbb6c0e3ae0fa10abbc20000acdf4f11"},
+ {file = "zstandard-0.21.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e7d560ce14fd209db6adacce8908244503a009c6c39eee0c10f138996cd66d3e"},
+ {file = "zstandard-0.21.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e6e131a4df2eb6f64961cea6f979cdff22d6e0d5516feb0d09492c8fd36f3bc"},
+ {file = "zstandard-0.21.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1e0c62a67ff425927898cf43da2cf6b852289ebcc2054514ea9bf121bec10a5"},
+ {file = "zstandard-0.21.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1545fb9cb93e043351d0cb2ee73fa0ab32e61298968667bb924aac166278c3fc"},
+ {file = "zstandard-0.21.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe6c821eb6870f81d73bf10e5deed80edcac1e63fbc40610e61f340723fd5f7c"},
+ {file = "zstandard-0.21.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ddb086ea3b915e50f6604be93f4f64f168d3fc3cef3585bb9a375d5834392d4f"},
+ {file = "zstandard-0.21.0-cp37-cp37m-win32.whl", hash = "sha256:57ac078ad7333c9db7a74804684099c4c77f98971c151cee18d17a12649bc25c"},
+ {file = "zstandard-0.21.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1243b01fb7926a5a0417120c57d4c28b25a0200284af0525fddba812d575f605"},
+ {file = "zstandard-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ea68b1ba4f9678ac3d3e370d96442a6332d431e5050223626bdce748692226ea"},
+ {file = "zstandard-0.21.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8070c1cdb4587a8aa038638acda3bd97c43c59e1e31705f2766d5576b329e97c"},
+ {file = "zstandard-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4af612c96599b17e4930fe58bffd6514e6c25509d120f4eae6031b7595912f85"},
+ {file = "zstandard-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cff891e37b167bc477f35562cda1248acc115dbafbea4f3af54ec70821090965"},
+ {file = "zstandard-0.21.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9fec02ce2b38e8b2e86079ff0b912445495e8ab0b137f9c0505f88ad0d61296"},
+ {file = "zstandard-0.21.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bdbe350691dec3078b187b8304e6a9c4d9db3eb2d50ab5b1d748533e746d099"},
+ {file = "zstandard-0.21.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b69cccd06a4a0a1d9fb3ec9a97600055cf03030ed7048d4bcb88c574f7895773"},
+ {file = "zstandard-0.21.0-cp38-cp38-win32.whl", hash = "sha256:9980489f066a391c5572bc7dc471e903fb134e0b0001ea9b1d3eff85af0a6f1b"},
+ {file = "zstandard-0.21.0-cp38-cp38-win_amd64.whl", hash = "sha256:0e1e94a9d9e35dc04bf90055e914077c80b1e0c15454cc5419e82529d3e70728"},
+ {file = "zstandard-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2d61675b2a73edcef5e327e38eb62bdfc89009960f0e3991eae5cc3d54718de"},
+ {file = "zstandard-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:25fbfef672ad798afab12e8fd204d122fca3bc8e2dcb0a2ba73bf0a0ac0f5f07"},
+ {file = "zstandard-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62957069a7c2626ae80023998757e27bd28d933b165c487ab6f83ad3337f773d"},
+ {file = "zstandard-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14e10ed461e4807471075d4b7a2af51f5234c8f1e2a0c1d37d5ca49aaaad49e8"},
+ {file = "zstandard-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9cff89a036c639a6a9299bf19e16bfb9ac7def9a7634c52c257166db09d950e7"},
+ {file = "zstandard-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52b2b5e3e7670bd25835e0e0730a236f2b0df87672d99d3bf4bf87248aa659fb"},
+ {file = "zstandard-0.21.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b1367da0dde8ae5040ef0413fb57b5baeac39d8931c70536d5f013b11d3fc3a5"},
+ {file = "zstandard-0.21.0-cp39-cp39-win32.whl", hash = "sha256:db62cbe7a965e68ad2217a056107cc43d41764c66c895be05cf9c8b19578ce9c"},
+ {file = "zstandard-0.21.0-cp39-cp39-win_amd64.whl", hash = "sha256:a8d200617d5c876221304b0e3fe43307adde291b4a897e7b0617a61611dfff6a"},
+ {file = "zstandard-0.21.0.tar.gz", hash = "sha256:f08e3a10d01a247877e4cb61a82a319ea746c356a3786558bed2481e6c405546"},
+]
+
+[package.dependencies]
+cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""}
+
+[package.extras]
+cffi = ["cffi (>=1.11)"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.10"
+content-hash = "92c090ae111eaa2821badd95cf3880b678fcbf647241c9edc8f789009acf76c8"
diff --git a/ollama/examples/langchain-python-rag-privategpt/privateGPT.py b/ollama/examples/langchain-python-rag-privategpt/privateGPT.py
new file mode 100755
index 0000000000000000000000000000000000000000..7d97a56788e72c7a8bc6386c09cb6ba4cbe7e687
--- /dev/null
+++ b/ollama/examples/langchain-python-rag-privategpt/privateGPT.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python3
+from langchain.chains import RetrievalQA
+from langchain.embeddings import HuggingFaceEmbeddings
+from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
+from langchain.vectorstores import Chroma
+from langchain.llms import Ollama
+import chromadb
+import os
+import argparse
+import time
+
+model = os.environ.get("MODEL", "llama2-uncensored")
+# For embeddings model, the example uses a sentence-transformers model
+# https://www.sbert.net/docs/pretrained_models.html
+# "The all-mpnet-base-v2 model provides the best quality, while all-MiniLM-L6-v2 is 5 times faster and still offers good quality."
+embeddings_model_name = os.environ.get("EMBEDDINGS_MODEL_NAME", "all-MiniLM-L6-v2")
+persist_directory = os.environ.get("PERSIST_DIRECTORY", "db")
+target_source_chunks = int(os.environ.get("TARGET_SOURCE_CHUNKS", 4))
+
+from constants import CHROMA_SETTINGS
+
+def main():
+ # Parse the command line arguments
+ args = parse_arguments()
+ embeddings = HuggingFaceEmbeddings(model_name=embeddings_model_name)
+
+ db = Chroma(persist_directory=persist_directory, embedding_function=embeddings)
+
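+    # Retrieve the top-k most similar chunks for each query (k = TARGET_SOURCE_CHUNKS)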
+ retriever = db.as_retriever(search_kwargs={"k": target_source_chunks})
+ # activate/deactivate the streaming StdOut callback for LLMs
+ callbacks = [] if args.mute_stream else [StreamingStdOutCallbackHandler()]
+
+ llm = Ollama(model=model, callbacks=callbacks)
+
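+    # chain_type="stuff" packs every retrieved chunk into a single prompt for the model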
+    qa = RetrievalQA.from_chain_type(llm=llm, chain_type="stuff", retriever=retriever, return_source_documents=not args.hide_source)
+ # Interactive questions and answers
+ while True:
+ query = input("\nEnter a query: ")
+ if query == "exit":
+ break
+ if query.strip() == "":
+ continue
+
+ # Get the answer from the chain
+ start = time.time()
+ res = qa(query)
+ answer, docs = res['result'], [] if args.hide_source else res['source_documents']
+ end = time.time()
+
+        # Print the question, the answer, and how long the chain took
+        print("\n\n> Question:")
+        print(query)
+        print(f"\n> Answer (took {round(end - start, 2)} s.):")
+        print(answer)
+
+ # Print the relevant sources used for the answer
+ for document in docs:
+ print("\n> " + document.metadata["source"] + ":")
+ print(document.page_content)
+
+def parse_arguments():
+ parser = argparse.ArgumentParser(description='privateGPT: Ask questions to your documents without an internet connection, '
+ 'using the power of LLMs.')
+ parser.add_argument("--hide-source", "-S", action='store_true',
+ help='Use this flag to disable printing of source documents used for answers.')
+
+ parser.add_argument("--mute-stream", "-M",
+ action='store_true',
+ help='Use this flag to disable the streaming StdOut callback for LLMs.')
+
+ return parser.parse_args()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/ollama/examples/langchain-python-rag-privategpt/pyproject.toml b/ollama/examples/langchain-python-rag-privategpt/pyproject.toml
new file mode 100755
index 0000000000000000000000000000000000000000..fa65a737462c48d6fc6f1ba775dd041a7bd68d1b
--- /dev/null
+++ b/ollama/examples/langchain-python-rag-privategpt/pyproject.toml
@@ -0,0 +1,26 @@
+[tool.poetry]
+name = "privategpt"
+version = "0.1.0"
+description = ""
+authors = ["Ivan Martinez "]
+license = "Apache Version 2.0"
+readme = "README.md"
+
+[tool.poetry.dependencies]
+python = "^3.10"
+langchain = "0.0.261"
+gpt4all = "^1.0.3"
+chromadb = "^0.3.26"
+PyMuPDF = "^1.22.5"
+python-dotenv = "^1.0.0"
+unstructured = "^0.8.0"
+extract-msg = "^0.41.5"
+tabulate = "^0.9.0"
+pandoc = "^2.3"
+pypandoc = "^1.11"
+tqdm = "^4.65.0"
+sentence-transformers = "^2.2.2"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
diff --git a/ollama/examples/langchain-python-rag-privategpt/requirements.txt b/ollama/examples/langchain-python-rag-privategpt/requirements.txt
new file mode 100755
index 0000000000000000000000000000000000000000..0aad1fe559918f49fecfd89f363a055dfde3807c
--- /dev/null
+++ b/ollama/examples/langchain-python-rag-privategpt/requirements.txt
@@ -0,0 +1,15 @@
+langchain==0.0.274
+gpt4all==1.0.8
+chromadb==0.4.7
+llama-cpp-python==0.1.81
+urllib3==2.0.4
+PyMuPDF==1.23.5
+python-dotenv==1.0.0
+unstructured==0.10.8
+extract-msg==0.45.0
+tabulate==0.9.0
+pandoc==2.3
+pypandoc==1.11
+tqdm==4.66.1
+sentence_transformers==2.2.2
+numpy>=1.22.2 # not directly required, pinned by Snyk to avoid a vulnerability
\ No newline at end of file
diff --git a/ollama/examples/langchain-python-rag-websummary/README.md b/ollama/examples/langchain-python-rag-websummary/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..29c706a392fd2647f3f80f0d9d3bffb3802c821d
--- /dev/null
+++ b/ollama/examples/langchain-python-rag-websummary/README.md
@@ -0,0 +1,23 @@
+# LangChain Web Summarization
+
+This example summarizes the website [https://ollama.com/blog/run-llama2-uncensored-locally](https://ollama.com/blog/run-llama2-uncensored-locally).
+
+## Running the Example
+
+1. Ensure you have the `llama3.1` model installed:
+
+ ```bash
+ ollama pull llama3.1
+ ```
+
+2. Install the Python requirements:
+
+ ```bash
+ pip install -r requirements.txt
+ ```
+
+3. Run the example:
+
+ ```bash
+ python main.py
+ ```
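+
+To summarize a different page, the same loader and chain used in `main.py` can be pointed at another URL. Below is a minimal sketch (not part of the shipped example): it assumes the same `langchain_community` imports that `main.py` uses and simply reads an optional URL from the command line.
+
+```python
+import sys
+
+from langchain_community.llms import Ollama
+from langchain_community.document_loaders import WebBaseLoader
+from langchain.chains.summarize import load_summarize_chain
+
+# Take the URL from the command line, falling back to the blog post used in main.py
+url = sys.argv[1] if len(sys.argv) > 1 else "https://ollama.com/blog/run-llama2-uncensored-locally"
+
+docs = WebBaseLoader(url).load()
+chain = load_summarize_chain(Ollama(model="llama3.1"), chain_type="stuff")
+print(chain.invoke(docs))
+```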
diff --git a/ollama/examples/langchain-python-rag-websummary/main.py b/ollama/examples/langchain-python-rag-websummary/main.py
new file mode 100755
index 0000000000000000000000000000000000000000..77b09fbbcdfac3a8f3703a9b8ebcc8361cd0fa98
--- /dev/null
+++ b/ollama/examples/langchain-python-rag-websummary/main.py
@@ -0,0 +1,12 @@
+from langchain_community.llms import Ollama
+from langchain_community.document_loaders import WebBaseLoader
+from langchain.chains.summarize import load_summarize_chain
+
+loader = WebBaseLoader("https://ollama.com/blog/run-llama2-uncensored-locally")
+docs = loader.load()
+
+llm = Ollama(model="llama3.1")
+chain = load_summarize_chain(llm, chain_type="stuff")
+
+result = chain.invoke(docs)
+print(result)
diff --git a/ollama/examples/langchain-python-rag-websummary/requirements.txt b/ollama/examples/langchain-python-rag-websummary/requirements.txt
new file mode 100755
index 0000000000000000000000000000000000000000..33cf51b301b484eefed0fd2b1e306006c1413937
--- /dev/null
+++ b/ollama/examples/langchain-python-rag-websummary/requirements.txt
@@ -0,0 +1 @@
+langchain==0.0.259
diff --git a/ollama/examples/langchain-python-simple/README.md b/ollama/examples/langchain-python-simple/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..60db2c8c3d80ca73888954b5ea7fcd7ea0b73442
--- /dev/null
+++ b/ollama/examples/langchain-python-simple/README.md
@@ -0,0 +1,23 @@
+# LangChain
+
+This example is a basic "hello world" of using LangChain with Ollama.
+
+## Running the Example
+
+1. Ensure you have the `llama3.1` model installed:
+
+ ```bash
+ ollama pull llama3.1
+ ```
+
+2. Install the Python requirements:
+
+ ```bash
+ pip install -r requirements.txt
+ ```
+
+3. Run the example:
+
+ ```bash
+ python main.py
+ ```
diff --git a/ollama/examples/langchain-python-simple/main.py b/ollama/examples/langchain-python-simple/main.py
new file mode 100755
index 0000000000000000000000000000000000000000..a7ed81d679b1ee2f712841cd1d82f5e76ebcf8e8
--- /dev/null
+++ b/ollama/examples/langchain-python-simple/main.py
@@ -0,0 +1,6 @@
+from langchain.llms import Ollama
+
+question = input("What is your question?")
+llm = Ollama(model="llama3.1")
+res = llm.predict(question)
+print(res)
diff --git a/ollama/examples/langchain-python-simple/requirements.txt b/ollama/examples/langchain-python-simple/requirements.txt
new file mode 100755
index 0000000000000000000000000000000000000000..33cf51b301b484eefed0fd2b1e306006c1413937
--- /dev/null
+++ b/ollama/examples/langchain-python-simple/requirements.txt
@@ -0,0 +1 @@
+langchain==0.0.259
diff --git a/ollama/examples/langchain-typescript-simple/README.md b/ollama/examples/langchain-typescript-simple/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..7c65ccfa63021279585dc15dae5fceeb08ae6d55
--- /dev/null
+++ b/ollama/examples/langchain-typescript-simple/README.md
@@ -0,0 +1,23 @@
+# LangChain
+
+This example is a basic "hello world" of using LangChain with Ollama in Node.js and TypeScript.
+
+## Running the Example
+
+1. Install the prerequisites:
+
+ ```bash
+ npm install
+ ```
+
+2. Ensure the `mistral` model is available:
+
+ ```bash
+ ollama pull mistral
+ ```
+
+3. Run the example:
+
+ ```bash
+ npm start
+ ```
diff --git a/ollama/examples/langchain-typescript-simple/main.ts b/ollama/examples/langchain-typescript-simple/main.ts
new file mode 100755
index 0000000000000000000000000000000000000000..53a58371a107c5598034e6273e848064c41a9fb2
--- /dev/null
+++ b/ollama/examples/langchain-typescript-simple/main.ts
@@ -0,0 +1,25 @@
+import { Ollama } from 'langchain/llms/ollama';
+import * as readline from "readline";
+
+async function main() {
+ const ollama = new Ollama({
+ model: 'mistral'
+ // other parameters can be found at https://js.langchain.com/docs/api/llms_ollama/classes/Ollama
+ });
+
+ const rl = readline.createInterface({
+ input: process.stdin,
+ output: process.stdout,
+ });
+
+ rl.question("What is your question: \n", async (user_input) => {
+ const stream = await ollama.stream(user_input);
+
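+    // Stream tokens to stdout as they arrive instead of waiting for the full completion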
+ for await (const chunk of stream) {
+ process.stdout.write(chunk);
+ }
+ rl.close();
+ })
+}
+
+main();
\ No newline at end of file
diff --git a/ollama/examples/langchain-typescript-simple/package-lock.json b/ollama/examples/langchain-typescript-simple/package-lock.json
new file mode 100755
index 0000000000000000000000000000000000000000..90587d204229ad9eaf570dd908a78110fb1dcf97
--- /dev/null
+++ b/ollama/examples/langchain-typescript-simple/package-lock.json
@@ -0,0 +1,997 @@
+{
+ "name": "langchain-typescript-simple",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "dependencies": {
+ "langchain": "^0.0.165"
+ },
+ "devDependencies": {
+ "typescript": "^5.2.2"
+ }
+ },
+ "node_modules/@anthropic-ai/sdk": {
+ "version": "0.6.2",
+ "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.6.2.tgz",
+ "integrity": "sha512-fB9PUj9RFT+XjkL+E9Ol864ZIJi+1P8WnbHspN3N3/GK2uSzjd0cbVIKTGgf4v3N8MwaQu+UWnU7C4BG/fap/g==",
+ "dependencies": {
+ "@types/node": "^18.11.18",
+ "@types/node-fetch": "^2.6.4",
+ "abort-controller": "^3.0.0",
+ "agentkeepalive": "^4.2.1",
+ "digest-fetch": "^1.3.0",
+ "form-data-encoder": "1.7.2",
+ "formdata-node": "^4.3.2",
+ "node-fetch": "^2.6.7"
+ }
+ },
+ "node_modules/@types/node": {
+ "version": "18.18.4",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.4.tgz",
+ "integrity": "sha512-t3rNFBgJRugIhackit2mVcLfF6IRc0JE4oeizPQL8Zrm8n2WY/0wOdpOPhdtG0V9Q2TlW/axbF1MJ6z+Yj/kKQ=="
+ },
+ "node_modules/@types/node-fetch": {
+ "version": "2.6.6",
+ "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.6.tgz",
+ "integrity": "sha512-95X8guJYhfqiuVVhRFxVQcf4hW/2bCuoPwDasMf/531STFoNoWTT7YDnWdXHEZKqAGUigmpG31r2FE70LwnzJw==",
+ "dependencies": {
+ "@types/node": "*",
+ "form-data": "^4.0.0"
+ }
+ },
+ "node_modules/@types/retry": {
+ "version": "0.12.0",
+ "resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.0.tgz",
+ "integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA=="
+ },
+ "node_modules/@types/uuid": {
+ "version": "9.0.5",
+ "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.5.tgz",
+ "integrity": "sha512-xfHdwa1FMJ082prjSJpoEI57GZITiQz10r3vEJCHa2khEFQjKy91aWKz6+zybzssCvXUwE1LQWgWVwZ4nYUvHQ=="
+ },
+ "node_modules/abort-controller": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
+ "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
+ "dependencies": {
+ "event-target-shim": "^5.0.0"
+ },
+ "engines": {
+ "node": ">=6.5"
+ }
+ },
+ "node_modules/agentkeepalive": {
+ "version": "4.5.0",
+ "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.5.0.tgz",
+ "integrity": "sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==",
+ "dependencies": {
+ "humanize-ms": "^1.2.1"
+ },
+ "engines": {
+ "node": ">= 8.0.0"
+ }
+ },
+ "node_modules/ansi-styles": {
+ "version": "5.2.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz",
+ "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==",
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-styles?sponsor=1"
+ }
+ },
+ "node_modules/argparse": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
+ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="
+ },
+ "node_modules/asynckit": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
+ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
+ },
+ "node_modules/base-64": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/base-64/-/base-64-0.1.0.tgz",
+ "integrity": "sha512-Y5gU45svrR5tI2Vt/X9GPd3L0HNIKzGu202EjxrXMpuc2V2CiKgemAbUUsqYmZJvPtCXoUKjNZwBJzsNScUbXA=="
+ },
+ "node_modules/base64-js": {
+ "version": "1.5.1",
+ "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
+ "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/feross"
+ },
+ {
+ "type": "patreon",
+ "url": "https://www.patreon.com/feross"
+ },
+ {
+ "type": "consulting",
+ "url": "https://feross.org/support"
+ }
+ ]
+ },
+ "node_modules/binary-extensions": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
+ "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/binary-search": {
+ "version": "1.3.6",
+ "resolved": "https://registry.npmjs.org/binary-search/-/binary-search-1.3.6.tgz",
+ "integrity": "sha512-nbE1WxOTTrUWIfsfZ4aHGYu5DOuNkbxGokjV6Z2kxfJK3uaAb8zNK1muzOeipoLHZjInT4Br88BHpzevc681xA=="
+ },
+ "node_modules/camelcase": {
+ "version": "6.3.0",
+ "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz",
+ "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==",
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/charenc": {
+ "version": "0.0.2",
+ "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz",
+ "integrity": "sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==",
+ "engines": {
+ "node": "*"
+ }
+ },
+ "node_modules/combined-stream": {
+ "version": "1.0.8",
+ "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
+ "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
+ "dependencies": {
+ "delayed-stream": "~1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/commander": {
+ "version": "10.0.1",
+ "resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz",
+ "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==",
+ "engines": {
+ "node": ">=14"
+ }
+ },
+ "node_modules/crypt": {
+ "version": "0.0.2",
+ "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz",
+ "integrity": "sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==",
+ "engines": {
+ "node": "*"
+ }
+ },
+ "node_modules/decamelize": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
+ "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/delayed-stream": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
+ "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
+ "engines": {
+ "node": ">=0.4.0"
+ }
+ },
+ "node_modules/digest-fetch": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/digest-fetch/-/digest-fetch-1.3.0.tgz",
+ "integrity": "sha512-CGJuv6iKNM7QyZlM2T3sPAdZWd/p9zQiRNS9G+9COUCwzWFTs0Xp8NF5iePx7wtvhDykReiRRrSeNb4oMmB8lA==",
+ "dependencies": {
+ "base-64": "^0.1.0",
+ "md5": "^2.3.0"
+ }
+ },
+ "node_modules/event-target-shim": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
+ "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/eventemitter3": {
+ "version": "4.0.7",
+ "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz",
+ "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw=="
+ },
+ "node_modules/expr-eval": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/expr-eval/-/expr-eval-2.0.2.tgz",
+ "integrity": "sha512-4EMSHGOPSwAfBiibw3ndnP0AvjDWLsMvGOvWEZ2F96IGk0bIVdjQisOHxReSkE13mHcfbuCiXw+G4y0zv6N8Eg=="
+ },
+ "node_modules/flat": {
+ "version": "5.0.2",
+ "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz",
+ "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==",
+ "bin": {
+ "flat": "cli.js"
+ }
+ },
+ "node_modules/form-data": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
+ "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
+ "dependencies": {
+ "asynckit": "^0.4.0",
+ "combined-stream": "^1.0.8",
+ "mime-types": "^2.1.12"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+ },
+ "node_modules/form-data-encoder": {
+ "version": "1.7.2",
+ "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.2.tgz",
+ "integrity": "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A=="
+ },
+ "node_modules/formdata-node": {
+ "version": "4.4.1",
+ "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-4.4.1.tgz",
+ "integrity": "sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==",
+ "dependencies": {
+ "node-domexception": "1.0.0",
+ "web-streams-polyfill": "4.0.0-beta.3"
+ },
+ "engines": {
+ "node": ">= 12.20"
+ }
+ },
+ "node_modules/humanize-ms": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz",
+ "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==",
+ "dependencies": {
+ "ms": "^2.0.0"
+ }
+ },
+ "node_modules/is-any-array": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-any-array/-/is-any-array-2.0.1.tgz",
+ "integrity": "sha512-UtilS7hLRu++wb/WBAw9bNuP1Eg04Ivn1vERJck8zJthEvXCBEBpGR/33u/xLKWEQf95803oalHrVDptcAvFdQ=="
+ },
+ "node_modules/is-buffer": {
+ "version": "1.1.6",
+ "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz",
+ "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w=="
+ },
+ "node_modules/js-tiktoken": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/js-tiktoken/-/js-tiktoken-1.0.7.tgz",
+ "integrity": "sha512-biba8u/clw7iesNEWLOLwrNGoBP2lA+hTaBLs/D45pJdUPFXyxD6nhcDVtADChghv4GgyAiMKYMiRx7x6h7Biw==",
+ "dependencies": {
+ "base64-js": "^1.5.1"
+ }
+ },
+ "node_modules/js-yaml": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
+ "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
+ "dependencies": {
+ "argparse": "^2.0.1"
+ },
+ "bin": {
+ "js-yaml": "bin/js-yaml.js"
+ }
+ },
+ "node_modules/jsonpointer": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-5.0.1.tgz",
+ "integrity": "sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/langchain": {
+ "version": "0.0.165",
+ "resolved": "https://registry.npmjs.org/langchain/-/langchain-0.0.165.tgz",
+ "integrity": "sha512-CpbNpjwaE+9lzjdw+pZz0VgnRrFivEgr7CVp9dDaAb5JpaJAA4V2v6uQ9ZPN+TSqupTQ79HFn2sfyZVEl2EG7Q==",
+ "dependencies": {
+ "@anthropic-ai/sdk": "^0.6.2",
+ "ansi-styles": "^5.0.0",
+ "binary-extensions": "^2.2.0",
+ "camelcase": "6",
+ "decamelize": "^1.2.0",
+ "expr-eval": "^2.0.2",
+ "flat": "^5.0.2",
+ "js-tiktoken": "^1.0.7",
+ "js-yaml": "^4.1.0",
+ "jsonpointer": "^5.0.1",
+ "langchainhub": "~0.0.6",
+ "langsmith": "~0.0.31",
+ "ml-distance": "^4.0.0",
+ "object-hash": "^3.0.0",
+ "openai": "~4.4.0",
+ "openapi-types": "^12.1.3",
+ "p-queue": "^6.6.2",
+ "p-retry": "4",
+ "uuid": "^9.0.0",
+ "yaml": "^2.2.1",
+ "zod": "^3.22.3",
+ "zod-to-json-schema": "^3.20.4"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "@aws-crypto/sha256-js": "^5.0.0",
+ "@aws-sdk/client-bedrock-runtime": "^3.422.0",
+ "@aws-sdk/client-dynamodb": "^3.310.0",
+ "@aws-sdk/client-kendra": "^3.352.0",
+ "@aws-sdk/client-lambda": "^3.310.0",
+ "@aws-sdk/client-s3": "^3.310.0",
+ "@aws-sdk/client-sagemaker-runtime": "^3.310.0",
+ "@aws-sdk/client-sfn": "^3.310.0",
+ "@aws-sdk/credential-provider-node": "^3.388.0",
+ "@azure/storage-blob": "^12.15.0",
+ "@clickhouse/client": "^0.0.14",
+ "@cloudflare/ai": "^1.0.12",
+ "@elastic/elasticsearch": "^8.4.0",
+ "@getmetal/metal-sdk": "*",
+ "@getzep/zep-js": "^0.7.0",
+ "@gomomento/sdk": "^1.23.0",
+ "@google-ai/generativelanguage": "^0.2.1",
+ "@google-cloud/storage": "^6.10.1",
+ "@huggingface/inference": "^1.5.1",
+ "@mozilla/readability": "*",
+ "@notionhq/client": "^2.2.10",
+ "@opensearch-project/opensearch": "*",
+ "@pinecone-database/pinecone": "^1.1.0",
+ "@planetscale/database": "^1.8.0",
+ "@qdrant/js-client-rest": "^1.2.0",
+ "@raycast/api": "^1.55.2",
+ "@smithy/eventstream-codec": "^2.0.5",
+ "@smithy/protocol-http": "^3.0.6",
+ "@smithy/signature-v4": "^2.0.10",
+ "@smithy/util-utf8": "^2.0.0",
+ "@supabase/postgrest-js": "^1.1.1",
+ "@supabase/supabase-js": "^2.10.0",
+ "@tensorflow-models/universal-sentence-encoder": "*",
+ "@tensorflow/tfjs-converter": "*",
+ "@tensorflow/tfjs-core": "*",
+ "@upstash/redis": "^1.20.6",
+ "@vercel/postgres": "^0.5.0",
+ "@writerai/writer-sdk": "^0.40.2",
+ "@xata.io/client": "^0.25.1",
+ "@xenova/transformers": "^2.5.4",
+ "@zilliz/milvus2-sdk-node": ">=2.2.7",
+ "apify-client": "^2.7.1",
+ "axios": "*",
+ "cassandra-driver": "^4.6.4",
+ "cheerio": "^1.0.0-rc.12",
+ "chromadb": "*",
+ "cohere-ai": ">=6.0.0",
+ "d3-dsv": "^2.0.0",
+ "epub2": "^3.0.1",
+ "faiss-node": "^0.3.0",
+ "fast-xml-parser": "^4.2.7",
+ "firebase-admin": "^11.9.0",
+ "google-auth-library": "^8.9.0",
+ "googleapis": "^126.0.1",
+ "hnswlib-node": "^1.4.2",
+ "html-to-text": "^9.0.5",
+ "ignore": "^5.2.0",
+ "ioredis": "^5.3.2",
+ "jsdom": "*",
+ "llmonitor": "*",
+ "lodash": "^4.17.21",
+ "mammoth": "*",
+ "mongodb": "^5.2.0",
+ "mysql2": "^3.3.3",
+ "neo4j-driver": "*",
+ "node-llama-cpp": "*",
+ "notion-to-md": "^3.1.0",
+ "pdf-parse": "1.1.1",
+ "peggy": "^3.0.2",
+ "pg": "^8.11.0",
+ "pg-copy-streams": "^6.0.5",
+ "pickleparser": "^0.1.0",
+ "playwright": "^1.32.1",
+ "portkey-ai": "^0.1.11",
+ "puppeteer": "^19.7.2",
+ "redis": "^4.6.4",
+ "replicate": "^0.18.0",
+ "sonix-speech-recognition": "^2.1.1",
+ "srt-parser-2": "^1.2.2",
+ "typeorm": "^0.3.12",
+ "typesense": "^1.5.3",
+ "usearch": "^1.1.1",
+ "vectordb": "^0.1.4",
+ "voy-search": "0.6.2",
+ "weaviate-ts-client": "^1.4.0",
+ "web-auth-library": "^1.0.3",
+ "youtube-transcript": "^1.0.6",
+ "youtubei.js": "^5.8.0"
+ },
+ "peerDependenciesMeta": {
+ "@aws-crypto/sha256-js": {
+ "optional": true
+ },
+ "@aws-sdk/client-bedrock-runtime": {
+ "optional": true
+ },
+ "@aws-sdk/client-dynamodb": {
+ "optional": true
+ },
+ "@aws-sdk/client-kendra": {
+ "optional": true
+ },
+ "@aws-sdk/client-lambda": {
+ "optional": true
+ },
+ "@aws-sdk/client-s3": {
+ "optional": true
+ },
+ "@aws-sdk/client-sagemaker-runtime": {
+ "optional": true
+ },
+ "@aws-sdk/client-sfn": {
+ "optional": true
+ },
+ "@aws-sdk/credential-provider-node": {
+ "optional": true
+ },
+ "@azure/storage-blob": {
+ "optional": true
+ },
+ "@clickhouse/client": {
+ "optional": true
+ },
+ "@cloudflare/ai": {
+ "optional": true
+ },
+ "@elastic/elasticsearch": {
+ "optional": true
+ },
+ "@getmetal/metal-sdk": {
+ "optional": true
+ },
+ "@getzep/zep-js": {
+ "optional": true
+ },
+ "@gomomento/sdk": {
+ "optional": true
+ },
+ "@google-ai/generativelanguage": {
+ "optional": true
+ },
+ "@google-cloud/storage": {
+ "optional": true
+ },
+ "@huggingface/inference": {
+ "optional": true
+ },
+ "@mozilla/readability": {
+ "optional": true
+ },
+ "@notionhq/client": {
+ "optional": true
+ },
+ "@opensearch-project/opensearch": {
+ "optional": true
+ },
+ "@pinecone-database/pinecone": {
+ "optional": true
+ },
+ "@planetscale/database": {
+ "optional": true
+ },
+ "@qdrant/js-client-rest": {
+ "optional": true
+ },
+ "@raycast/api": {
+ "optional": true
+ },
+ "@smithy/eventstream-codec": {
+ "optional": true
+ },
+ "@smithy/protocol-http": {
+ "optional": true
+ },
+ "@smithy/signature-v4": {
+ "optional": true
+ },
+ "@smithy/util-utf8": {
+ "optional": true
+ },
+ "@supabase/postgrest-js": {
+ "optional": true
+ },
+ "@supabase/supabase-js": {
+ "optional": true
+ },
+ "@tensorflow-models/universal-sentence-encoder": {
+ "optional": true
+ },
+ "@tensorflow/tfjs-converter": {
+ "optional": true
+ },
+ "@tensorflow/tfjs-core": {
+ "optional": true
+ },
+ "@upstash/redis": {
+ "optional": true
+ },
+ "@vercel/postgres": {
+ "optional": true
+ },
+ "@writerai/writer-sdk": {
+ "optional": true
+ },
+ "@xata.io/client": {
+ "optional": true
+ },
+ "@xenova/transformers": {
+ "optional": true
+ },
+ "@zilliz/milvus2-sdk-node": {
+ "optional": true
+ },
+ "apify-client": {
+ "optional": true
+ },
+ "axios": {
+ "optional": true
+ },
+ "cassandra-driver": {
+ "optional": true
+ },
+ "cheerio": {
+ "optional": true
+ },
+ "chromadb": {
+ "optional": true
+ },
+ "cohere-ai": {
+ "optional": true
+ },
+ "d3-dsv": {
+ "optional": true
+ },
+ "epub2": {
+ "optional": true
+ },
+ "faiss-node": {
+ "optional": true
+ },
+ "fast-xml-parser": {
+ "optional": true
+ },
+ "firebase-admin": {
+ "optional": true
+ },
+ "google-auth-library": {
+ "optional": true
+ },
+ "googleapis": {
+ "optional": true
+ },
+ "hnswlib-node": {
+ "optional": true
+ },
+ "html-to-text": {
+ "optional": true
+ },
+ "ignore": {
+ "optional": true
+ },
+ "ioredis": {
+ "optional": true
+ },
+ "jsdom": {
+ "optional": true
+ },
+ "llmonitor": {
+ "optional": true
+ },
+ "lodash": {
+ "optional": true
+ },
+ "mammoth": {
+ "optional": true
+ },
+ "mongodb": {
+ "optional": true
+ },
+ "mysql2": {
+ "optional": true
+ },
+ "neo4j-driver": {
+ "optional": true
+ },
+ "node-llama-cpp": {
+ "optional": true
+ },
+ "notion-to-md": {
+ "optional": true
+ },
+ "pdf-parse": {
+ "optional": true
+ },
+ "peggy": {
+ "optional": true
+ },
+ "pg": {
+ "optional": true
+ },
+ "pg-copy-streams": {
+ "optional": true
+ },
+ "pickleparser": {
+ "optional": true
+ },
+ "playwright": {
+ "optional": true
+ },
+ "portkey-ai": {
+ "optional": true
+ },
+ "puppeteer": {
+ "optional": true
+ },
+ "redis": {
+ "optional": true
+ },
+ "replicate": {
+ "optional": true
+ },
+ "sonix-speech-recognition": {
+ "optional": true
+ },
+ "srt-parser-2": {
+ "optional": true
+ },
+ "typeorm": {
+ "optional": true
+ },
+ "typesense": {
+ "optional": true
+ },
+ "usearch": {
+ "optional": true
+ },
+ "vectordb": {
+ "optional": true
+ },
+ "voy-search": {
+ "optional": true
+ },
+ "weaviate-ts-client": {
+ "optional": true
+ },
+ "web-auth-library": {
+ "optional": true
+ },
+ "youtube-transcript": {
+ "optional": true
+ },
+ "youtubei.js": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/langchainhub": {
+ "version": "0.0.6",
+ "resolved": "https://registry.npmjs.org/langchainhub/-/langchainhub-0.0.6.tgz",
+ "integrity": "sha512-SW6105T+YP1cTe0yMf//7kyshCgvCTyFBMTgH2H3s9rTAR4e+78DA/BBrUL/Mt4Q5eMWui7iGuAYb3pgGsdQ9w=="
+ },
+ "node_modules/langsmith": {
+ "version": "0.0.42",
+ "resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.0.42.tgz",
+ "integrity": "sha512-sFuN+e7E+pPBIRaRgFqZh/BRBWNHTZNAwi6uj4kydQawooCZYoJmM5snOkiQrhVSvAhgu6xFhLvmfvkPcKzD7w==",
+ "dependencies": {
+ "@types/uuid": "^9.0.1",
+ "commander": "^10.0.1",
+ "p-queue": "^6.6.2",
+ "p-retry": "4",
+ "uuid": "^9.0.0"
+ },
+ "bin": {
+ "langsmith": "dist/cli/main.cjs"
+ }
+ },
+ "node_modules/md5": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz",
+ "integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==",
+ "dependencies": {
+ "charenc": "0.0.2",
+ "crypt": "0.0.2",
+ "is-buffer": "~1.1.6"
+ }
+ },
+ "node_modules/mime-db": {
+ "version": "1.52.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
+ "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/mime-types": {
+ "version": "2.1.35",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
+ "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
+ "dependencies": {
+ "mime-db": "1.52.0"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/ml-array-mean": {
+ "version": "1.1.6",
+ "resolved": "https://registry.npmjs.org/ml-array-mean/-/ml-array-mean-1.1.6.tgz",
+ "integrity": "sha512-MIdf7Zc8HznwIisyiJGRH9tRigg3Yf4FldW8DxKxpCCv/g5CafTw0RRu51nojVEOXuCQC7DRVVu5c7XXO/5joQ==",
+ "dependencies": {
+ "ml-array-sum": "^1.1.6"
+ }
+ },
+ "node_modules/ml-array-sum": {
+ "version": "1.1.6",
+ "resolved": "https://registry.npmjs.org/ml-array-sum/-/ml-array-sum-1.1.6.tgz",
+ "integrity": "sha512-29mAh2GwH7ZmiRnup4UyibQZB9+ZLyMShvt4cH4eTK+cL2oEMIZFnSyB3SS8MlsTh6q/w/yh48KmqLxmovN4Dw==",
+ "dependencies": {
+ "is-any-array": "^2.0.0"
+ }
+ },
+ "node_modules/ml-distance": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/ml-distance/-/ml-distance-4.0.1.tgz",
+ "integrity": "sha512-feZ5ziXs01zhyFUUUeZV5hwc0f5JW0Sh0ckU1koZe/wdVkJdGxcP06KNQuF0WBTj8FttQUzcvQcpcrOp/XrlEw==",
+ "dependencies": {
+ "ml-array-mean": "^1.1.6",
+ "ml-distance-euclidean": "^2.0.0",
+ "ml-tree-similarity": "^1.0.0"
+ }
+ },
+ "node_modules/ml-distance-euclidean": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/ml-distance-euclidean/-/ml-distance-euclidean-2.0.0.tgz",
+ "integrity": "sha512-yC9/2o8QF0A3m/0IXqCTXCzz2pNEzvmcE/9HFKOZGnTjatvBbsn4lWYJkxENkA4Ug2fnYl7PXQxnPi21sgMy/Q=="
+ },
+ "node_modules/ml-tree-similarity": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/ml-tree-similarity/-/ml-tree-similarity-1.0.0.tgz",
+ "integrity": "sha512-XJUyYqjSuUQkNQHMscr6tcjldsOoAekxADTplt40QKfwW6nd++1wHWV9AArl0Zvw/TIHgNaZZNvr8QGvE8wLRg==",
+ "dependencies": {
+ "binary-search": "^1.3.5",
+ "num-sort": "^2.0.0"
+ }
+ },
+ "node_modules/ms": {
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
+ },
+ "node_modules/node-domexception": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz",
+ "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/jimmywarting"
+ },
+ {
+ "type": "github",
+ "url": "https://paypal.me/jimmywarting"
+ }
+ ],
+ "engines": {
+ "node": ">=10.5.0"
+ }
+ },
+ "node_modules/node-fetch": {
+ "version": "2.7.0",
+ "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
+ "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
+ "dependencies": {
+ "whatwg-url": "^5.0.0"
+ },
+ "engines": {
+ "node": "4.x || >=6.0.0"
+ },
+ "peerDependencies": {
+ "encoding": "^0.1.0"
+ },
+ "peerDependenciesMeta": {
+ "encoding": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/num-sort": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/num-sort/-/num-sort-2.1.0.tgz",
+ "integrity": "sha512-1MQz1Ed8z2yckoBeSfkQHHO9K1yDRxxtotKSJ9yvcTUUxSvfvzEq5GwBrjjHEpMlq/k5gvXdmJ1SbYxWtpNoVg==",
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/object-hash": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz",
+ "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==",
+ "engines": {
+ "node": ">= 6"
+ }
+ },
+ "node_modules/openai": {
+ "version": "4.4.0",
+ "resolved": "https://registry.npmjs.org/openai/-/openai-4.4.0.tgz",
+ "integrity": "sha512-JN0t628Kh95T0IrXl0HdBqnlJg+4Vq0Bnh55tio+dfCnyzHvMLiWyCM9m726MAJD2YkDU4/8RQB6rNbEq9ct2w==",
+ "dependencies": {
+ "@types/node": "^18.11.18",
+ "@types/node-fetch": "^2.6.4",
+ "abort-controller": "^3.0.0",
+ "agentkeepalive": "^4.2.1",
+ "digest-fetch": "^1.3.0",
+ "form-data-encoder": "1.7.2",
+ "formdata-node": "^4.3.2",
+ "node-fetch": "^2.6.7"
+ },
+ "bin": {
+ "openai": "bin/cli"
+ }
+ },
+ "node_modules/openapi-types": {
+ "version": "12.1.3",
+ "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.1.3.tgz",
+ "integrity": "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw=="
+ },
+ "node_modules/p-finally": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz",
+ "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==",
+ "engines": {
+ "node": ">=4"
+ }
+ },
+ "node_modules/p-queue": {
+ "version": "6.6.2",
+ "resolved": "https://registry.npmjs.org/p-queue/-/p-queue-6.6.2.tgz",
+ "integrity": "sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==",
+ "dependencies": {
+ "eventemitter3": "^4.0.4",
+ "p-timeout": "^3.2.0"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/p-retry": {
+ "version": "4.6.2",
+ "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-4.6.2.tgz",
+ "integrity": "sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ==",
+ "dependencies": {
+ "@types/retry": "0.12.0",
+ "retry": "^0.13.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/p-timeout": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz",
+ "integrity": "sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==",
+ "dependencies": {
+ "p-finally": "^1.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/retry": {
+ "version": "0.13.1",
+ "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz",
+ "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==",
+ "engines": {
+ "node": ">= 4"
+ }
+ },
+ "node_modules/tr46": {
+ "version": "0.0.3",
+ "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
+ "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="
+ },
+ "node_modules/typescript": {
+ "version": "5.2.2",
+ "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz",
+ "integrity": "sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==",
+ "dev": true,
+ "bin": {
+ "tsc": "bin/tsc",
+ "tsserver": "bin/tsserver"
+ },
+ "engines": {
+ "node": ">=14.17"
+ }
+ },
+ "node_modules/uuid": {
+ "version": "9.0.1",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz",
+ "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==",
+ "funding": [
+ "https://github.com/sponsors/broofa",
+ "https://github.com/sponsors/ctavan"
+ ],
+ "bin": {
+ "uuid": "dist/bin/uuid"
+ }
+ },
+ "node_modules/web-streams-polyfill": {
+ "version": "4.0.0-beta.3",
+ "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz",
+ "integrity": "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==",
+ "engines": {
+ "node": ">= 14"
+ }
+ },
+ "node_modules/webidl-conversions": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
+ "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="
+ },
+ "node_modules/whatwg-url": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
+ "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
+ "dependencies": {
+ "tr46": "~0.0.3",
+ "webidl-conversions": "^3.0.0"
+ }
+ },
+ "node_modules/yaml": {
+ "version": "2.3.2",
+ "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.2.tgz",
+ "integrity": "sha512-N/lyzTPaJasoDmfV7YTrYCI0G/3ivm/9wdG0aHuheKowWQwGTsK0Eoiw6utmzAnI6pkJa0DUVygvp3spqqEKXg==",
+ "engines": {
+ "node": ">= 14"
+ }
+ },
+ "node_modules/zod": {
+ "version": "3.22.4",
+ "resolved": "https://registry.npmjs.org/zod/-/zod-3.22.4.tgz",
+ "integrity": "sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==",
+ "funding": {
+ "url": "https://github.com/sponsors/colinhacks"
+ }
+ },
+ "node_modules/zod-to-json-schema": {
+ "version": "3.21.4",
+ "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.21.4.tgz",
+ "integrity": "sha512-fjUZh4nQ1s6HMccgIeE0VP4QG/YRGPmyjO9sAh890aQKPEk3nqbfUXhMFaC+Dr5KvYBm8BCyvfpZf2jY9aGSsw==",
+ "peerDependencies": {
+ "zod": "^3.21.4"
+ }
+ }
+ }
+}
diff --git a/ollama/examples/langchain-typescript-simple/package.json b/ollama/examples/langchain-typescript-simple/package.json
new file mode 100755
index 0000000000000000000000000000000000000000..5d6a5b88e7771b383b2902e11e2245a86eba8cd7
--- /dev/null
+++ b/ollama/examples/langchain-typescript-simple/package.json
@@ -0,0 +1,13 @@
+{
+ "scripts": {
+ "start": "tsx main.ts"
+ },
+ "devDependencies": {
+ "tsx": "^4.6.2",
+ "typescript": "^5.3.3"
+ },
+ "dependencies": {
+ "langchain": "^0.0.165",
+ "readline": "^1.3.0"
+ }
+}
diff --git a/ollama/examples/modelfile-mario/Modelfile b/ollama/examples/modelfile-mario/Modelfile
new file mode 100755
index 0000000000000000000000000000000000000000..a3747086439eb7786a162783c07c8e1dda320fb2
--- /dev/null
+++ b/ollama/examples/modelfile-mario/Modelfile
@@ -0,0 +1,5 @@
+FROM llama3.1
+PARAMETER temperature 1
+SYSTEM """
+You are Mario from Super Mario Bros, acting as an assistant.
+"""
diff --git a/ollama/examples/modelfile-mario/logo.png b/ollama/examples/modelfile-mario/logo.png
new file mode 100755
index 0000000000000000000000000000000000000000..1ef2564623051121def212c7f366e55ebff0ca72
Binary files /dev/null and b/ollama/examples/modelfile-mario/logo.png differ
diff --git a/ollama/examples/modelfile-mario/readme.md b/ollama/examples/modelfile-mario/readme.md
new file mode 100755
index 0000000000000000000000000000000000000000..c3f34197a1ec1970cc2e42b814f16ec034535e79
--- /dev/null
+++ b/ollama/examples/modelfile-mario/readme.md
@@ -0,0 +1,43 @@
+
+
+# Example character: Mario
+
+This example shows how to create a basic character using Llama3.1 as the base model.
+
+To run this example:
+
+1. Download the Modelfile
+2. `ollama pull llama3.1` to get the base model used in the model file.
+3. `ollama create NAME -f ./Modelfile`
+4. `ollama run NAME`
+
+Ask it some questions like "Who are you?" or "Is Peach in trouble again?"
+
+## Editing this file
+
+What the model file looks like:
+
+```
+FROM llama3.1
+PARAMETER temperature 1
+SYSTEM """
+You are Mario from Super Mario Bros, acting as an assistant.
+"""
+```
+
+What if you want to change its behaviour?
+
+- Try changing the prompt
+- Try changing the parameters [Docs](https://github.com/ollama/ollama/blob/main/docs/modelfile.md)
+- Try changing the model (e.g. `FROM wizard-vicuna`, the wizard-vicuna uncensored model)
+
+Once the changes are made,
+
+1. `ollama create NAME -f ./Modelfile`
+2. `ollama run NAME`
+3. Iterate until you are happy with the results.
+
+Notes:
+
+- This example is for research purposes only. There is no affiliation with any entity.
+- When using an uncensored model, please be aware that it may generate offensive content.
diff --git a/ollama/examples/python-dockerit/Modelfile b/ollama/examples/python-dockerit/Modelfile
new file mode 100755
index 0000000000000000000000000000000000000000..acd63644f2a46f80a235298829cd00e793011c48
--- /dev/null
+++ b/ollama/examples/python-dockerit/Modelfile
@@ -0,0 +1,20 @@
+FROM mistral
+SYSTEM """
+You are an experienced DevOps engineer focused on Docker. When given specifications for a particular need or application, you know the best way to host it within a Docker container. For instance, if someone tells you they want an nginx server to host files located at /web, you will answer as follows
+
+---start
+FROM nginx:alpine
+COPY /myweb /usr/share/nginx/html
+EXPOSE 80
+---end
+
+Notice that the answer you should give is just the contents of the Dockerfile with no explanation, with three dashes and the word start at the beginning and three dashes and the word end at the end. The full output can be piped into a file and run as is. Here is another example: the user asks to launch a Postgres server with a password of abc123, and the response should be
+
+---start
+FROM postgres:latest
+ENV POSTGRES_PASSWORD=abc123
+EXPOSE 5432
+---end
+
+Again, it's just the contents of the Dockerfile and nothing else.
+"""
diff --git a/ollama/examples/python-dockerit/README.md b/ollama/examples/python-dockerit/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..2ba00ce2a3c98c03feb2ba33c036b46d16e0201a
--- /dev/null
+++ b/ollama/examples/python-dockerit/README.md
@@ -0,0 +1,31 @@
+# DockerIt
+
+DockerIt is a tool to help you build and run your application in a Docker container. It consists of a model that defines the system prompt and model weights to use, along with a Python script that takes the generated Dockerfile, builds the image, and runs a container from it automatically.
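+
+For reference, a condensed sketch of what the script does with the model's output (the Dockerfile text and image tag here are illustrative stand-ins for the model output and your chosen name):
+
+```python
+import io
+
+import docker
+
+# Stand-in for the Dockerfile text that the model would generate
+dockerfile = "FROM nginx:alpine\nEXPOSE 80\n"
+
+client = docker.from_env()
+
+# Build an image from the in-memory Dockerfile, then start a container from it
+image, _logs = client.images.build(fileobj=io.BytesIO(dockerfile.encode("utf-8")), tag="dockerit-example")
+container = client.containers.run("dockerit-example", detach=True)
+print("Started container", container.name, container.id)
+```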
+
+## Running the Example
+
+1. Ensure you have the `mattw/dockerit` model installed:
+
+ ```bash
+ ollama pull mattw/dockerit
+ ```
+
+2. Make sure Docker is running on your machine.
+
+3. Install the Python Requirements.
+
+ ```bash
+ pip install -r requirements.txt
+ ```
+
+4. Run the example:
+
+ ```bash
+ python dockerit.py "simple postgres server with admin password set to 123"
+ ```
+
+5. Enter the name you would like to use for your container image.
+
+## Caveats
+
+This is a simple example. It assumes the generated Dockerfile content will work. In many cases, even with simple web servers, it fails when trying to copy files that don't exist. It's simply an example of what you could possibly do.
diff --git a/ollama/examples/python-dockerit/dockerit.py b/ollama/examples/python-dockerit/dockerit.py
new file mode 100755
index 0000000000000000000000000000000000000000..6a288d9064a3fe887e3c1fc2ce08d73c6fdfd0de
--- /dev/null
+++ b/ollama/examples/python-dockerit/dockerit.py
@@ -0,0 +1,17 @@
+import requests, json, docker, io, sys
+
+# The description of the desired container comes from the command-line arguments
+inputDescription = " ".join(sys.argv[1:])
+imageName = input("Enter the name of the image: ")
+client = docker.from_env()
+s = requests.Session()
+output = ""
+
+# Stream the generated Dockerfile from Ollama, accumulating the response tokens
+with s.post('http://localhost:11434/api/generate', json={'model': 'mattw/dockerit', 'prompt': inputDescription}, stream=True) as r:
+    for line in r.iter_lines():
+        if line:
+            j = json.loads(line)
+            if "response" in j:
+                output = output + j["response"]
+
+# Keep only the text between the ---start and ---end markers
+output = output[output.find("---start") + 9:output.find("---end") - 1]
+
+# Build the image from the in-memory Dockerfile, then start a container from it
+f = io.BytesIO(bytes(output, 'utf-8'))
+client.images.build(fileobj=f, tag=imageName)
+container = client.containers.run(imageName, detach=True)
+print("Container named", container.name, "started with id:", container.id)
diff --git a/ollama/examples/python-dockerit/requirements.txt b/ollama/examples/python-dockerit/requirements.txt
new file mode 100755
index 0000000000000000000000000000000000000000..6d0eac4b37afd0c217cb6517e9d040f231f72fe2
--- /dev/null
+++ b/ollama/examples/python-dockerit/requirements.txt
@@ -0,0 +1 @@
+docker
\ No newline at end of file
diff --git a/ollama/examples/python-json-datagenerator/predefinedschema.py b/ollama/examples/python-json-datagenerator/predefinedschema.py
new file mode 100755
index 0000000000000000000000000000000000000000..68090ad790bd22ecc3773ce93aef815271f48994
--- /dev/null
+++ b/ollama/examples/python-json-datagenerator/predefinedschema.py
@@ -0,0 +1,31 @@
+import requests
+import json
+import random
+
+model = "llama3.1"
+template = {
+ "firstName": "",
+ "lastName": "",
+ "address": {
+ "street": "",
+ "city": "",
+ "state": "",
+ "zipCode": ""
+ },
+ "phoneNumber": ""
+}
+
+prompt = f"generate one realistically believable sample data set of a persons first name, last name, address in the US, and phone number. \nUse the following template: {json.dumps(template)}."
+
+data = {
+ "prompt": prompt,
+ "model": model,
+ "format": "json",
+ "stream": False,
+ "options": {"temperature": 2.5, "top_p": 0.99, "top_k": 100},
+}
+
+print(f"Generating a sample user")
+response = requests.post("http://localhost:11434/api/generate", json=data, stream=False)
+json_data = json.loads(response.text)
+print(json.dumps(json.loads(json_data["response"]), indent=2))
diff --git a/ollama/examples/python-json-datagenerator/randomaddresses.py b/ollama/examples/python-json-datagenerator/randomaddresses.py
new file mode 100755
index 0000000000000000000000000000000000000000..878c98037d97387e9b8480bf497b545f8b9d9878
--- /dev/null
+++ b/ollama/examples/python-json-datagenerator/randomaddresses.py
@@ -0,0 +1,31 @@
+import requests
+import json
+import random
+
+countries = [
+ "United States",
+ "United Kingdom",
+ "the Netherlands",
+ "Germany",
+ "Mexico",
+ "Canada",
+ "France",
+]
+country = random.choice(countries)
+model = "llama3.1"
+
+prompt = f"generate one realistically believable sample data set of a persons first name, last name, address in {country}, and phone number. Do not use common names. Respond using JSON. Key names should have no backslashes, values should use plain ascii with no special characters."
+
+data = {
+ "prompt": prompt,
+ "model": model,
+ "format": "json",
+ "stream": False,
+ "options": {"temperature": 2.5, "top_p": 0.99, "top_k": 100},
+}
+
+print(f"Generating a sample user in {country}")
+response = requests.post("http://localhost:11434/api/generate", json=data, stream=False)
+json_data = json.loads(response.text)
+
+print(json.dumps(json.loads(json_data["response"]), indent=2))
diff --git a/ollama/examples/python-json-datagenerator/readme.md b/ollama/examples/python-json-datagenerator/readme.md
new file mode 100755
index 0000000000000000000000000000000000000000..5b444dff13c131f9ab2c3eb8901ab4104db0e924
--- /dev/null
+++ b/ollama/examples/python-json-datagenerator/readme.md
@@ -0,0 +1,60 @@
+# JSON Output Example
+
+
+
+There are two python scripts in this example. `randomaddresses.py` generates random addresses from different countries. `predefinedschema.py` sets a template for the model to fill in.
+
+## Running the Example
+
+1. Ensure you have the `llama3.1` model installed:
+
+ ```bash
+ ollama pull llama3.1
+ ```
+
+2. Install the Python Requirements.
+
+ ```bash
+ pip install -r requirements.txt
+ ```
+
+3. Run the Random Addresses example:
+
+ ```bash
+ python randomaddresses.py
+ ```
+
+4. Run the Predefined Schema example:
+
+ ```bash
+ python predefinedschema.py
+ ```
+
+## Review the Code
+
+Both programs are basically the same, with a different prompt for each, demonstrating two different ideas. The key to getting JSON out of a model is to state in the prompt or system prompt that it should respond using JSON, and to specify the `format` as `json` in the data body.
+
+```python
+prompt = f"generate one realistically believable sample data set of a persons first name, last name, address in {country}, and phone number. Do not use common names. Respond using JSON. Key names should with no backslashes, values should use plain ascii with no special characters."
+
+data = {
+ "prompt": prompt,
+ "model": model,
+ "format": "json",
+ "stream": False,
+ "options": {"temperature": 2.5, "top_p": 0.99, "top_k": 100},
+}
+```
+
+When running `randomaddresses.py` you will see that the schema changes and adapts to the chosen country.
+
+In `predefinedschema.py`, a template has been specified in the prompt as well. It's been defined as JSON and then dumped into the prompt string to make it easier to work with.
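+
+The relevant part of that script, abbreviated here to a few fields, looks like this:
+
+```python
+import json
+
+# Abbreviated version of the template defined in predefinedschema.py
+template = {
+    "firstName": "",
+    "lastName": "",
+    "phoneNumber": ""
+}
+
+# The template is serialized and embedded directly in the prompt string
+prompt = (
+    "generate one realistically believable sample data set of a persons first name, "
+    f"last name, and phone number. Use the following template: {json.dumps(template)}."
+)
+```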
+
+Both examples turn streaming off so that we end up with the completed JSON all at once. We then parse `response.text` as JSON so that, when we print it back out as a string, we can set the indent spacing and make the output easy to read.
+
+```python
+response = requests.post("http://localhost:11434/api/generate", json=data, stream=False)
+json_data = json.loads(response.text)
+
+print(json.dumps(json.loads(json_data["response"]), indent=2))
+```
diff --git a/ollama/examples/python-json-datagenerator/requirements.txt b/ollama/examples/python-json-datagenerator/requirements.txt
new file mode 100755
index 0000000000000000000000000000000000000000..9688b8ec02db6cbc34b987b2167075033d4ec11b
--- /dev/null
+++ b/ollama/examples/python-json-datagenerator/requirements.txt
@@ -0,0 +1 @@
+Requests==2.31.0
diff --git a/ollama/examples/python-loganalysis/Modelfile b/ollama/examples/python-loganalysis/Modelfile
new file mode 100755
index 0000000000000000000000000000000000000000..5237cb6eb910f99c1a4c3ed3effd234aa22c34bb
--- /dev/null
+++ b/ollama/examples/python-loganalysis/Modelfile
@@ -0,0 +1,8 @@
+FROM codebooga:latest
+
+SYSTEM """
+You are a log file analyzer. You will receive a set of lines from a log file for some software application, find the errors and other interesting aspects of the logs, and explain them so a new user can understand what they mean. If there are any steps they can do to resolve them, list the steps in your answer.
+"""
+
+PARAMETER temperature 0.3
+
diff --git a/ollama/examples/python-loganalysis/loganalysis.py b/ollama/examples/python-loganalysis/loganalysis.py
new file mode 100755
index 0000000000000000000000000000000000000000..4c7eccbd4ab1f81cbc27117af3e892595a4839e9
--- /dev/null
+++ b/ollama/examples/python-loganalysis/loganalysis.py
@@ -0,0 +1,41 @@
+import sys
+import re
+import requests
+import json
+
+# prelines and postlines represent the number of lines of context to include in the output around the error
+prelines = 10
+postlines = 10
+
+def find_errors_in_log_file():
+    if len(sys.argv) < 2:
+        print("Usage: python loganalysis.py <logfile>")
+        sys.exit(1)
+
+    log_file_path = sys.argv[1]
+    with open(log_file_path, 'r') as log_file:
+        log_lines = log_file.readlines()
+
+    error_logs = []
+    for i, line in enumerate(log_lines):
+        if "error" in line.lower():
+            # keep `prelines` lines before and `postlines` lines after the match
+            start_index = max(0, i - prelines)
+            end_index = min(len(log_lines), i + postlines + 1)
+            error_logs.extend(log_lines[start_index:end_index])
+
+    return error_logs
+
+error_logs = find_errors_in_log_file()
+
+data = {
+ "prompt": "\n".join(error_logs),
+ "model": "mattw/loganalyzer"
+}
+
+response = requests.post("http://localhost:11434/api/generate", json=data, stream=True)
+for line in response.iter_lines():
+ if line:
+ json_data = json.loads(line)
+ if json_data['done'] == False:
+ print(json_data['response'], end='', flush=True)
+
diff --git a/ollama/examples/python-loganalysis/logtest.logfile b/ollama/examples/python-loganalysis/logtest.logfile
new file mode 100755
index 0000000000000000000000000000000000000000..e4181bfe3d44dff3f3ce64c9cbdc3db51baf6cb8
--- /dev/null
+++ b/ollama/examples/python-loganalysis/logtest.logfile
@@ -0,0 +1,32 @@
+2023-11-10 07:17:40 /docker-entrypoint.sh: /docker-entrypoint.d/ is not empty, will attempt to perform configuration
+2023-11-10 07:17:40 /docker-entrypoint.sh: Looking for shell scripts in /docker-entrypoint.d/
+2023-11-10 07:17:40 /docker-entrypoint.sh: Launching /docker-entrypoint.d/10-listen-on-ipv6-by-default.sh
+2023-11-10 07:17:40 10-listen-on-ipv6-by-default.sh: info: Getting the checksum of /etc/nginx/conf.d/default.conf
+2023-11-10 07:17:40 10-listen-on-ipv6-by-default.sh: info: Enabled listen on IPv6 in /etc/nginx/conf.d/default.conf
+2023-11-10 07:17:40 /docker-entrypoint.sh: Sourcing /docker-entrypoint.d/15-local-resolvers.envsh
+2023-11-10 07:17:40 /docker-entrypoint.sh: Launching /docker-entrypoint.d/20-envsubst-on-templates.sh
+2023-11-10 07:17:40 /docker-entrypoint.sh: Launching /docker-entrypoint.d/30-tune-worker-processes.sh
+2023-11-10 07:17:40 /docker-entrypoint.sh: Configuration complete; ready for start up
+2023-11-10 07:17:40 2023/11/10 13:17:40 [notice] 1#1: using the "epoll" event method
+2023-11-10 07:17:40 2023/11/10 13:17:40 [notice] 1#1: nginx/1.25.3
+2023-11-10 07:17:40 2023/11/10 13:17:40 [notice] 1#1: built by gcc 12.2.0 (Debian 12.2.0-14)
+2023-11-10 07:17:40 2023/11/10 13:17:40 [notice] 1#1: OS: Linux 6.4.16-linuxkit
+2023-11-10 07:17:40 2023/11/10 13:17:40 [notice] 1#1: getrlimit(RLIMIT_NOFILE): 1048576:1048576
+2023-11-10 07:17:40 2023/11/10 13:17:40 [notice] 1#1: start worker processes
+2023-11-10 07:17:40 2023/11/10 13:17:40 [notice] 1#1: start worker process 29
+2023-11-10 07:17:40 2023/11/10 13:17:40 [notice] 1#1: start worker process 30
+2023-11-10 07:17:40 2023/11/10 13:17:40 [notice] 1#1: start worker process 31
+2023-11-10 07:17:40 2023/11/10 13:17:40 [notice] 1#1: start worker process 32
+2023-11-10 07:17:40 2023/11/10 13:17:40 [notice] 1#1: start worker process 33
+2023-11-10 07:17:40 2023/11/10 13:17:40 [notice] 1#1: start worker process 34
+2023-11-10 07:17:40 2023/11/10 13:17:40 [notice] 1#1: start worker process 35
+2023-11-10 07:17:40 2023/11/10 13:17:40 [notice] 1#1: start worker process 36
+2023-11-10 07:17:40 2023/11/10 13:17:40 [notice] 1#1: start worker process 37
+2023-11-10 07:17:40 2023/11/10 13:17:40 [notice] 1#1: start worker process 38
+2023-11-10 07:17:44 192.168.65.1 - - [10/Nov/2023:13:17:43 +0000] "GET / HTTP/1.1" 200 615 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36" "-"
+2023-11-10 07:17:44 2023/11/10 13:17:44 [error] 29#29: *1 open() "/usr/share/nginx/html/favicon.ico" failed (2: No such file or directory), client: 192.168.65.1, server: localhost, request: "GET /favicon.ico HTTP/1.1", host: "localhost:8080", referrer: "http://localhost:8080/"
+2023-11-10 07:17:44 192.168.65.1 - - [10/Nov/2023:13:17:44 +0000] "GET /favicon.ico HTTP/1.1" 404 555 "http://localhost:8080/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36" "-"
+2023-11-10 07:17:50 2023/11/10 13:17:50 [error] 29#29: *1 open() "/usr/share/nginx/html/ahstat" failed (2: No such file or directory), client: 192.168.65.1, server: localhost, request: "GET /ahstat HTTP/1.1", host: "localhost:8080"
+2023-11-10 07:17:50 192.168.65.1 - - [10/Nov/2023:13:17:50 +0000] "GET /ahstat HTTP/1.1" 404 555 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36" "-"
+2023-11-10 07:18:53 2023/11/10 13:18:53 [error] 29#29: *1 open() "/usr/share/nginx/html/ahstat" failed (2: No such file or directory), client: 192.168.65.1, server: localhost, request: "GET /ahstat HTTP/1.1", host: "localhost:8080"
+2023-11-10 07:18:53 192.168.65.1 - - [10/Nov/2023:13:18:53 +0000] "GET /ahstat HTTP/1.1" 404 555 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36" "-"
diff --git a/ollama/examples/python-loganalysis/readme.md b/ollama/examples/python-loganalysis/readme.md
new file mode 100755
index 0000000000000000000000000000000000000000..4be0baaaf8d672f873989a616d48c5ed5a5030eb
--- /dev/null
+++ b/ollama/examples/python-loganalysis/readme.md
@@ -0,0 +1,70 @@
+# Log Analysis example
+
+
+
+This example shows one possible way to create a log file analyzer. It uses the model **mattw/loganalyzer** which is based on **codebooga**, a 34b parameter model.
+
+To use it, run:
+
+`python loganalysis.py <logfile>`
+
+You can try this with the `logtest.logfile` file included in this directory.
+
+## Running the Example
+
+1. Ensure you have the `mattw/loganalyzer` model installed:
+
+ ```bash
+ ollama pull mattw/loganalyzer
+ ```
+
+2. Install the Python Requirements.
+
+ ```bash
+ pip install -r requirements.txt
+ ```
+
+3. Run the example:
+
+ ```bash
+ python loganalysis.py logtest.logfile
+ ```
+
+## Review the code
+
+The first part of this example is a Modelfile that takes `codebooga` and applies a new System Prompt:
+
+```plaintext
+SYSTEM """
+You are a log file analyzer. You will receive a set of lines from a log file for some software application, find the errors and other interesting aspects of the logs, and explain them so a new user can understand what they mean. If there are any steps they can do to resolve them, list the steps in your answer.
+"""
+```
+
+This model is available at https://ollama.com/mattw/loganalyzer. You can customize it and add it to your own namespace using the command `ollama create <namespace/modelname> -f <modelfile>` and then `ollama push <namespace/modelname>`.
+
+Then loganalysis.py scans all the lines in the given log file and searches for the word 'error'. When the word is found, the 10 lines before and after are set as the prompt for a call to the Generate API.
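+
+The scanning step, condensed from `loganalysis.py` above (`log_lines`, `prelines`, and `postlines` are defined earlier in that script), looks like this:
+
+```python
+error_logs = []
+for i, line in enumerate(log_lines):
+    if "error" in line.lower():
+        # keep 10 lines of context before and after the matching line
+        start_index = max(0, i - prelines)
+        end_index = min(len(log_lines), i + postlines + 1)
+        error_logs.extend(log_lines[start_index:end_index])
+```
+
+The collected lines are then joined together to form the prompt in the request body: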
+
+```python
+data = {
+ "prompt": "\n".join(error_logs),
+ "model": "mattw/loganalyzer"
+}
+```
+
+Finally, the streamed output is parsed and the `response` field from each JSON object is printed to the screen.
+
+```python
+response = requests.post("http://localhost:11434/api/generate", json=data, stream=True)
+for line in response.iter_lines():
+ if line:
+ json_data = json.loads(line)
+ if json_data['done'] == False:
+ print(json_data['response'], end='')
+
+```
+
+## Next Steps
+
+There is a lot more that can be done here. This is a simple way to detect errors: just looking for the word 'error'. It would be more interesting to find anomalous activity in the logs, for example by creating embeddings for each line and comparing them to find similar lines, or by applying a Levenshtein distance algorithm to find similar lines and use them to identify the anomalous ones.
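+
+As a rough sketch of the similarity idea, using only the standard library's `difflib` (the function name and the `0.7` threshold here are arbitrary choices, not part of the example):
+
+```python
+from difflib import SequenceMatcher
+
+def anomalous_lines(lines, threshold=0.7):
+    """Flag lines that are not similar to any other line in the file."""
+    flagged = []
+    for i, line in enumerate(lines):
+        # highest similarity of this line to any other line
+        best = max(
+            (SequenceMatcher(None, line, other).ratio()
+             for j, other in enumerate(lines) if j != i),
+            default=0.0,
+        )
+        if best < threshold:
+            flagged.append(line)
+    return flagged
+```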
+
+Try different models and different prompts to analyze the data. You could consider adding retrieval augmented generation (RAG) to this to help understand newer log formats.
diff --git a/ollama/examples/python-loganalysis/requirements.txt b/ollama/examples/python-loganalysis/requirements.txt
new file mode 100755
index 0000000000000000000000000000000000000000..9688b8ec02db6cbc34b987b2167075033d4ec11b
--- /dev/null
+++ b/ollama/examples/python-loganalysis/requirements.txt
@@ -0,0 +1 @@
+Requests==2.31.0
diff --git a/ollama/examples/python-rag-newssummary/README.md b/ollama/examples/python-rag-newssummary/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..51a68be189bdb5461682295bddeb886887bac1ab
--- /dev/null
+++ b/ollama/examples/python-rag-newssummary/README.md
@@ -0,0 +1,35 @@
+# News Summarizer
+
+This example goes through a series of steps:
+
+ 1. You choose a topic area (e.g., "news", "NVidia", "music", etc.).
+ 2. Gets the most recent articles on that topic from various sources.
+ 3. Uses Ollama to summarize each article.
+ 4. Creates chunks of sentences from each article.
+ 5. Uses Sentence Transformers to generate embeddings for each of those chunks.
+ 6. You enter a question regarding the summaries shown.
+ 7. Uses Sentence Transformers to generate an embedding for that question.
+ 8. Uses the embedded question to find the most similar chunks.
+ 9. Feeds all that to Ollama to generate a good answer to your question based on these news articles.
+
+This example lets you pick from a few different topic areas, then summarizes the most recent articles for that topic (five per topic in this example). It then creates chunks of sentences from each article and generates embeddings for each of those chunks.
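+
+Condensed from `summ.py` and `utils.py`, the embedding and retrieval step looks roughly like this (the chunk and question text here are placeholders):
+
+```python
+import numpy as np
+from sentence_transformers import SentenceTransformer
+from sklearn.neighbors import NearestNeighbors
+
+model = SentenceTransformer("all-MiniLM-L6-v2")
+chunks = ["first chunk of an article...", "second chunk...", "third chunk..."]
+
+# Embed every chunk, then the question, and find the closest chunks by cosine distance
+chunk_embeddings = np.array(model.encode(chunks))
+question_embedding = model.encode(["What was announced?"])
+
+knn = NearestNeighbors(n_neighbors=2, metric="cosine")
+knn.fit(chunk_embeddings)
+distances, indices = knn.kneighbors(question_embedding)
+
+# These best-matching chunks are what gets handed to Ollama as the system prompt
+best_matches = [chunks[i] for i in indices[0]]
+```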
+
+## Running the Example
+
+1. Ensure you have the `mistral-openorca` model installed:
+
+ ```bash
+ ollama pull mistral-openorca
+ ```
+
+2. Install the Python Requirements.
+
+ ```bash
+ pip install -r requirements.txt
+ ```
+
+3. Run the example:
+
+ ```bash
+ python summ.py
+ ```
diff --git a/ollama/examples/python-rag-newssummary/requirements.txt b/ollama/examples/python-rag-newssummary/requirements.txt
new file mode 100755
index 0000000000000000000000000000000000000000..1a92729a1eca49748c4394a83806d052213b7f53
--- /dev/null
+++ b/ollama/examples/python-rag-newssummary/requirements.txt
@@ -0,0 +1,9 @@
+beautifulsoup4==4.12.2
+feedparser==6.0.10
+mattsollamatools==0.0.8
+newspaper3k==0.2.8
+nltk==3.8.1
+numpy==1.24.3
+Requests==2.31.0
+scikit_learn==1.3.0
+sentence_transformers==2.2.2
diff --git a/ollama/examples/python-rag-newssummary/summ.py b/ollama/examples/python-rag-newssummary/summ.py
new file mode 100755
index 0000000000000000000000000000000000000000..4993cfca5102f07b3c838a83b210c788693a5c8e
--- /dev/null
+++ b/ollama/examples/python-rag-newssummary/summ.py
@@ -0,0 +1,86 @@
+import curses
+import json
+from utils import get_url_for_topic, topic_urls, menu, getUrls, get_summary, getArticleText, knn_search
+import requests
+from sentence_transformers import SentenceTransformer
+from mattsollamatools import chunker
+
+if __name__ == "__main__":
+ chosen_topic = curses.wrapper(menu)
+ print("Here is your news summary:\n")
+ urls = getUrls(chosen_topic, n=5)
+ model = SentenceTransformer('all-MiniLM-L6-v2')
+ allEmbeddings = []
+
+ for url in urls:
+ article={}
+ article['embeddings'] = []
+ article['url'] = url
+ text = getArticleText(url)
+ summary = get_summary(text)
+ chunks = chunker(text) # Use the chunk_text function from web_utils
+ embeddings = model.encode(chunks)
+ for (chunk, embedding) in zip(chunks, embeddings):
+ item = {}
+ item['source'] = chunk
+ item['embedding'] = embedding.tolist() # Convert NumPy array to list
+ item['sourcelength'] = len(chunk)
+ article['embeddings'].append(item)
+
+ allEmbeddings.append(article)
+
+ print(f"{summary}\n")
+
+
+ while True:
+ context = []
+ # Input a question from the user
+ question = input("Enter your question about the news, or type quit: ")
+
+ if question.lower() == 'quit':
+ break
+
+ # Embed the user's question
+ question_embedding = model.encode([question])
+
+ # Perform KNN search to find the best matches (indices and source text)
+ best_matches = knn_search(question_embedding, allEmbeddings, k=10)
+
+
+ sourcetext=""
+ for i, (index, source_text) in enumerate(best_matches, start=1):
+ sourcetext += f"{i}. Index: {index}, Source Text: {source_text}"
+
+ systemPrompt = f"Only use the following information to answer the question. Do not use anything else: {sourcetext}"
+
+ url = "http://localhost:11434/api/generate"
+
+ payload = {
+ "model": "mistral-openorca",
+ "prompt": question,
+ "system": systemPrompt,
+ "stream": False,
+ "context": context
+ }
+
+ # Convert the payload to a JSON string
+ payload_json = json.dumps(payload)
+
+ # Set the headers to specify JSON content
+ headers = {
+ "Content-Type": "application/json"
+ }
+
+ # Send the POST request
+ response = requests.post(url, data=payload_json, headers=headers)
+
+ # Check the response
+ if response.status_code == 200:
+ output = json.loads(response.text)
+ context = output['context']
+ print(output['response']+ "\n")
+
+
+ else:
+ print(f"Request failed with status code {response.status_code}")
+
diff --git a/ollama/examples/python-rag-newssummary/utils.py b/ollama/examples/python-rag-newssummary/utils.py
new file mode 100755
index 0000000000000000000000000000000000000000..0bce011b4aa590677d2ef1bada34bfc9439d9850
--- /dev/null
+++ b/ollama/examples/python-rag-newssummary/utils.py
@@ -0,0 +1,108 @@
+import curses
+import feedparser
+import requests
+import unicodedata
+import json
+from newspaper import Article
+from bs4 import BeautifulSoup
+from nltk.tokenize import sent_tokenize, word_tokenize
+import numpy as np
+from sklearn.neighbors import NearestNeighbors
+from mattsollamatools import chunker
+
+# Create a dictionary to store topics and their URLs
+topic_urls = {
+ "Mac": "https://9to5mac.com/guides/mac/feed",
+ "News": "http://www.npr.org/rss/rss.php?id=1001",
+ "Nvidia": "https://nvidianews.nvidia.com/releases.xml",
+ "Raspberry Pi": "https://www.raspberrypi.com/news/feed/",
+ "Music": "https://www.billboard.com/c/music/music-news/feed/"
+}
+
+# Use curses to create a menu of topics
+def menu(stdscr):
+    # get_url_for_topic returns the feed URL for the topic the user picked
+    chosen_url = get_url_for_topic(stdscr)
+    topic = next((name for name, url in topic_urls.items() if url == chosen_url), "Topic not found")
+
+    stdscr.addstr(len(topic_urls) + 3, 0, f"Selected URL for {topic}: {chosen_url}")
+    stdscr.refresh()
+
+    return chosen_url
+
+# You have chosen a topic. Now return the url for that topic
+def get_url_for_topic(stdscr):
+ curses.curs_set(0) # Hide the cursor
+ stdscr.clear()
+
+ stdscr.addstr(0, 0, "Choose a topic using the arrow keys (Press Enter to select):")
+
+ # Create a list of topics
+ topics = list(topic_urls.keys())
+ current_topic = 0
+
+ while True:
+ for i, topic in enumerate(topics):
+ if i == current_topic:
+ stdscr.addstr(i + 2, 2, f"> {topic}")
+ else:
+ stdscr.addstr(i + 2, 2, f" {topic}")
+
+ stdscr.refresh()
+
+ key = stdscr.getch()
+
+ if key == curses.KEY_DOWN and current_topic < len(topics) - 1:
+ current_topic += 1
+ elif key == curses.KEY_UP and current_topic > 0:
+ current_topic -= 1
+ elif key == 10: # Enter key
+ return topic_urls[topics[current_topic]]
+
+# Get the last N URLs from an RSS feed
+def getUrls(feed_url, n=20):
+ feed = feedparser.parse(feed_url)
+ entries = feed.entries[-n:]
+ urls = [entry.link for entry in entries]
+ return urls
+
+# Often there are a bunch of ads and menus on pages for a news article. This uses newspaper3k to get just the text of just the article.
+def getArticleText(url):
+ article = Article(url)
+ article.download()
+ article.parse()
+ return article.text
+
+def get_summary(text):
+ systemPrompt = "Write a concise summary of the text, return your responses with 5 lines that cover the key points of the text given."
+ prompt = text
+
+ url = "http://localhost:11434/api/generate"
+
+ payload = {
+ "model": "mistral-openorca",
+ "prompt": prompt,
+ "system": systemPrompt,
+ "stream": False
+ }
+ payload_json = json.dumps(payload)
+ headers = {"Content-Type": "application/json"}
+ response = requests.post(url, data=payload_json, headers=headers)
+
+ return json.loads(response.text)["response"]
+
+# Perform K-nearest neighbors (KNN) search
+def knn_search(question_embedding, embeddings, k=5):
+ X = np.array([item['embedding'] for article in embeddings for item in article['embeddings']])
+ source_texts = [item['source'] for article in embeddings for item in article['embeddings']]
+
+ # Fit a KNN model on the embeddings
+ knn = NearestNeighbors(n_neighbors=k, metric='cosine')
+ knn.fit(X)
+
+ # Find the indices and distances of the k-nearest neighbors
+ distances, indices = knn.kneighbors(question_embedding, n_neighbors=k)
+
+ # Get the indices and source texts of the best matches
+ best_matches = [(indices[0][i], source_texts[indices[0][i]]) for i in range(k)]
+
+ return best_matches
diff --git a/ollama/examples/python-simplechat/client.py b/ollama/examples/python-simplechat/client.py
new file mode 100755
index 0000000000000000000000000000000000000000..85043d5f4f24a52ef4c15bd668f513d589d7bcd1
--- /dev/null
+++ b/ollama/examples/python-simplechat/client.py
@@ -0,0 +1,48 @@
+import json
+import requests
+
+# NOTE: ollama must be running for this to work, start the ollama app or run `ollama serve`
+model = "llama3.1" # TODO: update this for whatever model you wish to use
+
+
+def chat(messages):
+ r = requests.post(
+ "http://0.0.0.0:11434/api/chat",
+ json={"model": model, "messages": messages, "stream": True},
+ stream=True
+ )
+ r.raise_for_status()
+ output = ""
+
+ for line in r.iter_lines():
+ body = json.loads(line)
+ if "error" in body:
+ raise Exception(body["error"])
+ if body.get("done") is False:
+ message = body.get("message", "")
+ content = message.get("content", "")
+ output += content
+ # the response streams one token at a time, print that as we receive it
+ print(content, end="", flush=True)
+
+ if body.get("done", False):
+ message["content"] = output
+ return message
+
+
+def main():
+ messages = []
+
+ while True:
+ user_input = input("Enter a prompt: ")
+ if not user_input:
+ exit()
+ print()
+ messages.append({"role": "user", "content": user_input})
+ message = chat(messages)
+ messages.append(message)
+ print("\n\n")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/ollama/examples/python-simplechat/readme.md b/ollama/examples/python-simplechat/readme.md
new file mode 100755
index 0000000000000000000000000000000000000000..4c2ded4d8fd035c87d7ef02d103cdd54bc6181c9
--- /dev/null
+++ b/ollama/examples/python-simplechat/readme.md
@@ -0,0 +1,44 @@
+# Simple Chat Example
+
+The **chat** endpoint is one of two ways to generate text from an LLM with Ollama, and was introduced in version 0.1.14. At a high level, you provide the endpoint with an array of objects, each with a role and content. Then, with each prompt and output, you add more of those role/content objects, which builds up the conversation history.
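+
+For example, after one round trip the `messages` array might look like this (the contents are illustrative):
+
+```python
+messages = [
+    {"role": "user", "content": "Why is the sky blue?"},
+    {"role": "assistant", "content": "Because of Rayleigh scattering..."},
+    {"role": "user", "content": "How is that different at sunset?"},
+]
+```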
+
+## Running the Example
+
+1. Ensure you have the `llama3.1` model installed:
+
+ ```bash
+ ollama pull llama3.1
+ ```
+
+2. Install the Python Requirements.
+
+ ```bash
+ pip install -r requirements.txt
+ ```
+
+3. Run the example:
+
+ ```bash
+ python client.py
+ ```
+
+## Review the Code
+
+You can see in the **chat** function that calling the endpoint is as simple as:
+
+```python
+r = requests.post(
+    "http://0.0.0.0:11434/api/chat",
+    json={"model": model, "messages": messages, "stream": True},
+    stream=True,
+)
+```
+
+With the **generate** endpoint, you need to provide a `prompt`. With **chat**, you provide `messages` instead, and the resulting stream of responses includes a `message` object with a `content` field.
+
+The final JSON object doesn't provide the full content, so you will need to build up the content yourself.
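+
+That accumulation is just a few lines, condensed and slightly simplified from the **chat** function above:
+
+```python
+output = ""
+for line in r.iter_lines():
+    body = json.loads(line)
+    if not body.get("done", False):
+        output += body.get("message", {}).get("content", "")
+```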
+
+In the **main** function, we collect `user_input`, add it as a message to our list of messages, and pass that list to the chat function. When the LLM is done responding, its output is appended as another message.
+
+## Next Steps
+
+In this example, all generations are kept. You might want to experiment with summarizing everything older than 10 conversations to enable longer history with less context being used.
diff --git a/ollama/examples/python-simplechat/requirements.txt b/ollama/examples/python-simplechat/requirements.txt
new file mode 100755
index 0000000000000000000000000000000000000000..9688b8ec02db6cbc34b987b2167075033d4ec11b
--- /dev/null
+++ b/ollama/examples/python-simplechat/requirements.txt
@@ -0,0 +1 @@
+Requests==2.31.0
diff --git a/ollama/examples/python-simplegenerate/README.md b/ollama/examples/python-simplegenerate/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..a9175207358e86bb0e8558624b1913cbfbdb9f73
--- /dev/null
+++ b/ollama/examples/python-simplegenerate/README.md
@@ -0,0 +1,29 @@
+# Simple Generate Example
+
+This is a simple example using the **Generate** endpoint.
+
+## Running the Example
+
+1. Ensure you have the `stablelm-zephyr` model installed:
+
+ ```bash
+ ollama pull stablelm-zephyr
+ ```
+
+2. Install the Python Requirements.
+
+ ```bash
+ pip install -r requirements.txt
+ ```
+
+3. Run the example:
+
+ ```bash
+ python client.py
+ ```
+
+## Review the Code
+
+The **main** function simply asks for input, then passes that to the generate function. The context returned by generate is then passed back to generate on the next call.
+
+The **generate** function uses `requests.post` to call `/api/generate`, passing the model, prompt, and context. The `generate` endpoint returns a stream of JSON blobs that are then iterated through, looking for the response values. That is then printed out. The final JSON object includes the full context of the conversation so far, and that is the return value from the function.
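+
+Condensed from `client.py` (where `r` is the streaming response from `requests.post`), that loop looks like:
+
+```python
+context = []
+for line in r.iter_lines():
+    body = json.loads(line)
+    print(body.get('response', ''), end='', flush=True)
+    if body.get('done', False):
+        context = body['context']  # carried into the next call as the conversation history
+```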
diff --git a/ollama/examples/python-simplegenerate/client.py b/ollama/examples/python-simplegenerate/client.py
new file mode 100755
index 0000000000000000000000000000000000000000..7b5cf810911bc0c7fe25cf6ee31837bf3484ab49
--- /dev/null
+++ b/ollama/examples/python-simplegenerate/client.py
@@ -0,0 +1,40 @@
+import json
+import requests
+
+# NOTE: ollama must be running for this to work, start the ollama app or run `ollama serve`
+model = 'stablelm-zephyr' # TODO: update this for whatever model you wish to use
+
+def generate(prompt, context):
+ r = requests.post('http://localhost:11434/api/generate',
+ json={
+ 'model': model,
+ 'prompt': prompt,
+ 'context': context,
+ },
+ stream=True)
+ r.raise_for_status()
+
+ for line in r.iter_lines():
+ body = json.loads(line)
+ response_part = body.get('response', '')
+ # the response streams one token at a time, print that as we receive it
+ print(response_part, end='', flush=True)
+
+ if 'error' in body:
+ raise Exception(body['error'])
+
+ if body.get('done', False):
+ return body['context']
+
+def main():
+ context = [] # the context stores a conversation history, you can use this to make the model more context aware
+ while True:
+ user_input = input("Enter a prompt: ")
+ if not user_input:
+ exit()
+ print()
+ context = generate(user_input, context)
+ print()
+
+if __name__ == "__main__":
+ main()
diff --git a/ollama/examples/python-simplegenerate/requirements.txt b/ollama/examples/python-simplegenerate/requirements.txt
new file mode 100755
index 0000000000000000000000000000000000000000..9688b8ec02db6cbc34b987b2167075033d4ec11b
--- /dev/null
+++ b/ollama/examples/python-simplegenerate/requirements.txt
@@ -0,0 +1 @@
+Requests==2.31.0
diff --git a/ollama/examples/typescript-functioncalling/extractemail.ts b/ollama/examples/typescript-functioncalling/extractemail.ts
new file mode 100755
index 0000000000000000000000000000000000000000..a2f0b2d1f943efcbf8aa333e894dcc5fbfb7491d
--- /dev/null
+++ b/ollama/examples/typescript-functioncalling/extractemail.ts
@@ -0,0 +1,118 @@
+import { Ollama } from "ollama-node";
+import { readFile } from "fs/promises";
+
+// function to be called on events
+function reportEvents(name: string, date: string, location: string) {
+ const nameString = name ? `${name}` : `an event`;
+ const dateString = date ? ` on ${date}` : ``;
+ const locationString = location ? ` at ${location}` : ``;
+ console.log(`You have an event: ${nameString}${dateString}${locationString}`)
+}
+
+// function to be called on addresses
+function reportAddresses(address) {
+ for (const field in address) {
+ if (address[field]) {
+ if (field === "city") {
+ const city = address.city;
+ const state = address.state ? `, ${address.state}` : '';
+ const zip = address.zip ? ` ${address.zip}` : '';
+ console.log(`${city}${state}${zip}`);
+ break;
+ } else {
+ console.log(`${address[field]}`);
+ }
+ }
+ }
+ console.log(``);
+}
+
+async function main() {
+
+ const ollama = new Ollama();
+
+ const systemprompt = `You will be given a text along with a prompt and a schema. You will have to extract the information requested in the prompt from the text and generate output in JSON observing the schema provided. If the schema shows a type of integer or number, you must only show a integer for that field. A string should always be a valid string. If a value is unknown, leave it empty. Output the JSON with extra spaces to ensure that it pretty prints.`
+
+ const schema = {
+ "eventsQuantity": {
+ "type": "integer",
+ "description": "The number of events in the source text"
+ },
+ "addressesQuantity": {
+ "type": "integer",
+ "description": "The number of addresses in the source text"
+ },
+ "events": [{
+ name: {
+ "type": "string",
+ description: "Name of the event"
+ },
+ "date": {
+ "type": "string",
+ "description": "Date of the event"
+ },
+ "location": {
+ "type": "string",
+ "description": "Location of the event"
+ },
+ "extraInfo": {
+ "type": "string",
+ "description": "Any extra information that is provided about the event."
+ }
+ }],
+ "people": [{
+ "name": {
+ "type": "string",
+ "description": "Name of the person"
+ },
+ "company": {
+ "type": "string",
+ "description": "Name of the company where they work"
+ },
+ "street": {
+ "type": "string",
+ "description": "Street address of the person or company. This is only the street name and the numerical address. Do not include city, state, or zip of the address in this field."
+ },
+ "city": {
+ "type": "string",
+ "description": "City portion of the address of the person or company"
+ },
+ "state": {
+ "type": "string",
+ "description": "State portion of the address of the person or company"
+ },
+ "zip": {
+ "type": "string",
+ "description": "Zip code of the person or company"
+ },
+ "extraInfo": {
+ "type": "string",
+ "description": "Any extra information that is provided about the location."
+ }
+ }]
+ }
+
+ const textcontent = await readFile("./info.txt", "utf-8").then((text) => text.split(" ").slice(0, 2000).join(" "));
+
+ const prompt = `The source text is a series of emails that have been put into a single file. They are separated by three dashes. Review the source text and determine the full address of the person sending each of the emails as well as any events that we need to track. If they provide a company address use that. If any extra info is provided, such as a description of the place, or a floor, add it to extraInfo. The first field in the address JSON is quantity of events and should be set to the number of events tracked and the second field should be set to the number of addresses tracked in the file. Don't stuff an event into the output that isn't an event. Only add data to the mostly appropriate field. Don't make up fields that aren't in the schema. If there isn't a value for a field, use null. Output should be in JSON.\n\nSchema: \n${JSON.stringify(schema, null, 2)}\n\nSource Text:\n${textcontent}`
+
+ await ollama.setModel("neural-chat");
+ ollama.setSystemPrompt(systemprompt);
+ ollama.setJSONFormat(true);
+ const data = await ollama.generate(prompt);
+ const output = JSON.parse(data.output);
+ const events = output.events;
+ const addresses = output.people;
+
+ console.log(`Here are your ${output.eventsQuantity} events:`);
+ for (const event of events) {
+ reportEvents(event.name, event.date, event.location);
+ }
+
+ console.log(`\n\nHere are your ${output.addressesQuantity} addresses:`);
+ for (const address of addresses) {
+ reportAddresses(address);
+ }
+}
+
+main();
\ No newline at end of file
diff --git a/ollama/examples/typescript-functioncalling/extractwp.ts b/ollama/examples/typescript-functioncalling/extractwp.ts
new file mode 100755
index 0000000000000000000000000000000000000000..b199607d5c0acc149ff4f520d271464a00b300aa
--- /dev/null
+++ b/ollama/examples/typescript-functioncalling/extractwp.ts
@@ -0,0 +1,38 @@
+import { Ollama } from "ollama-node";
+import { readFile } from "fs/promises";
+
+async function main() {
+
+ const ollama = new Ollama();
+
+ // Set the system prompt to prepare the model to receive a prompt and a schema and set some rules for the output.
+ const systemprompt = `You will be given a text along with a prompt and a schema. You will have to extract the information requested in the prompt from the text and generate output in JSON observing the schema provided. If the schema shows a type of integer or number, you must only show a integer for that field. A string should always be a valid string. If a value is unknown, leave it empty. Output the JSON with extra spaces to ensure that it pretty prints.`
+
+ const schema = {
+ "people": [{
+ "name": {
+ "type": "string",
+ "description": "Name of the person"
+ },
+ "title": {
+ "type": "string",
+ "description": "Title of the person"
+ }
+ }],
+ }
+
+ // Depending on the model chosen, you may be limited by the size of the context window, so limit the context to 2000 words.
+ const textcontent = await readFile("./wp.txt", "utf-8").then((text) => text.split(" ").slice(0, 2000).join(" "));
+
+ // Specific instructions for this task
+ const prompt = `Review the source text and determine the 10 most important people to focus on. Then extract the name and title for those people. Output should be in JSON.\n\nSchema: \n${JSON.stringify(schema, null, 2)}\n\nSource Text:\n${textcontent}`
+
+ await ollama.setModel("neural-chat");
+ ollama.setSystemPrompt(systemprompt);
+
+ // setJSONFormat is the equivalent of setting 'format: json' in the API
+ ollama.setJSONFormat(true);
+ await ollama.streamingGenerate(prompt, (word) => { process.stdout.write(word) })
+}
+
+main();
\ No newline at end of file
diff --git a/ollama/examples/typescript-functioncalling/info.txt b/ollama/examples/typescript-functioncalling/info.txt
new file mode 100755
index 0000000000000000000000000000000000000000..4fe0e1ca127640705c73e8e04a61d7a355f69b54
--- /dev/null
+++ b/ollama/examples/typescript-functioncalling/info.txt
@@ -0,0 +1,17 @@
+---
+Hi matt,
+
+thanks for letting me know that you are going to come today, November 16, for my tea party. My address is 123 Falk St on Bainbridge Island. I live in the house with the red door. I will be home all day so just come by whenever you want.
+
+Fred
+
+---
+Great, send the check to our office at 1917 1st St, Seattle, WA 98101. I will let you know when we receive it.
+
+Mark Richardson
+Big Corp
+---
+We are looking forward to seeing you at our Local AI Meetup. It will be held on December 3. It will be at the offices of Enormous Co. Our address is 344 1st Ave, Seattle, WA 98101. We will be meeting in the conference room on the 3rd floor.
+
+Barbara Reilly
+Enormous Co.
\ No newline at end of file
diff --git a/ollama/examples/typescript-functioncalling/package-lock.json b/ollama/examples/typescript-functioncalling/package-lock.json
new file mode 100755
index 0000000000000000000000000000000000000000..c555ddaef69ced51e733723702bd69a5ff61ff03
--- /dev/null
+++ b/ollama/examples/typescript-functioncalling/package-lock.json
@@ -0,0 +1,519 @@
+{
+ "name": "typescript-functioncalling",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "dependencies": {
+ "ollama-node": "^0.1.27"
+ },
+ "devDependencies": {
+ "tsx": "^4.1.2",
+ "typescript": "^5.2.2"
+ }
+ },
+ "node_modules/@esbuild/android-arm": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.20.tgz",
+ "integrity": "sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/android-arm64": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.18.20.tgz",
+ "integrity": "sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/android-x64": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.18.20.tgz",
+ "integrity": "sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/darwin-arm64": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz",
+ "integrity": "sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/darwin-x64": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.18.20.tgz",
+ "integrity": "sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/freebsd-arm64": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.20.tgz",
+ "integrity": "sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/freebsd-x64": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.18.20.tgz",
+ "integrity": "sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-arm": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.18.20.tgz",
+ "integrity": "sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-arm64": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.18.20.tgz",
+ "integrity": "sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-ia32": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.18.20.tgz",
+ "integrity": "sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-loong64": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.18.20.tgz",
+ "integrity": "sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-mips64el": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.18.20.tgz",
+ "integrity": "sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==",
+ "cpu": [
+ "mips64el"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-ppc64": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.18.20.tgz",
+ "integrity": "sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-riscv64": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.18.20.tgz",
+ "integrity": "sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-s390x": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.18.20.tgz",
+ "integrity": "sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==",
+ "cpu": [
+ "s390x"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-x64": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.18.20.tgz",
+ "integrity": "sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/netbsd-x64": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.18.20.tgz",
+ "integrity": "sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "netbsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/openbsd-x64": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.18.20.tgz",
+ "integrity": "sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/sunos-x64": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.18.20.tgz",
+ "integrity": "sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "sunos"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/win32-arm64": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.18.20.tgz",
+ "integrity": "sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/win32-ia32": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.18.20.tgz",
+ "integrity": "sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/win32-x64": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.18.20.tgz",
+ "integrity": "sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@types/node": {
+ "version": "20.9.0",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-20.9.0.tgz",
+ "integrity": "sha512-nekiGu2NDb1BcVofVcEKMIwzlx4NjHlcjhoxxKBNLtz15Y1z7MYf549DFvkHSId02Ax6kGwWntIBPC3l/JZcmw==",
+ "dependencies": {
+ "undici-types": "~5.26.4"
+ }
+ },
+ "node_modules/buffer-from": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
+ "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==",
+ "dev": true
+ },
+ "node_modules/esbuild": {
+ "version": "0.18.20",
+ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.18.20.tgz",
+ "integrity": "sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA==",
+ "dev": true,
+ "hasInstallScript": true,
+ "bin": {
+ "esbuild": "bin/esbuild"
+ },
+ "engines": {
+ "node": ">=12"
+ },
+ "optionalDependencies": {
+ "@esbuild/android-arm": "0.18.20",
+ "@esbuild/android-arm64": "0.18.20",
+ "@esbuild/android-x64": "0.18.20",
+ "@esbuild/darwin-arm64": "0.18.20",
+ "@esbuild/darwin-x64": "0.18.20",
+ "@esbuild/freebsd-arm64": "0.18.20",
+ "@esbuild/freebsd-x64": "0.18.20",
+ "@esbuild/linux-arm": "0.18.20",
+ "@esbuild/linux-arm64": "0.18.20",
+ "@esbuild/linux-ia32": "0.18.20",
+ "@esbuild/linux-loong64": "0.18.20",
+ "@esbuild/linux-mips64el": "0.18.20",
+ "@esbuild/linux-ppc64": "0.18.20",
+ "@esbuild/linux-riscv64": "0.18.20",
+ "@esbuild/linux-s390x": "0.18.20",
+ "@esbuild/linux-x64": "0.18.20",
+ "@esbuild/netbsd-x64": "0.18.20",
+ "@esbuild/openbsd-x64": "0.18.20",
+ "@esbuild/sunos-x64": "0.18.20",
+ "@esbuild/win32-arm64": "0.18.20",
+ "@esbuild/win32-ia32": "0.18.20",
+ "@esbuild/win32-x64": "0.18.20"
+ }
+ },
+ "node_modules/fsevents": {
+ "version": "2.3.3",
+ "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
+ "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
+ "dev": true,
+ "hasInstallScript": true,
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
+ }
+ },
+ "node_modules/get-tsconfig": {
+ "version": "4.7.2",
+ "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.2.tgz",
+ "integrity": "sha512-wuMsz4leaj5hbGgg4IvDU0bqJagpftG5l5cXIAvo8uZrqn0NJqwtfupTN00VnkQJPcIRrxYrm1Ue24btpCha2A==",
+ "dev": true,
+ "dependencies": {
+ "resolve-pkg-maps": "^1.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1"
+ }
+ },
+ "node_modules/ollama-node": {
+ "version": "0.1.27",
+ "resolved": "https://registry.npmjs.org/ollama-node/-/ollama-node-0.1.27.tgz",
+ "integrity": "sha512-tFABPf5P0sXCR5USA31E3tqbge5h/4uf/t5j8/rPvHDo0SDwXeN0kah2J7hIqqkYlO1vLRs0uLC1/Mprgv9t2g==",
+ "dependencies": {
+ "@types/node": "^20.8.4"
+ }
+ },
+ "node_modules/resolve-pkg-maps": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz",
+ "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==",
+ "dev": true,
+ "funding": {
+ "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1"
+ }
+ },
+ "node_modules/source-map": {
+ "version": "0.6.1",
+ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
+ "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/source-map-support": {
+ "version": "0.5.21",
+ "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
+ "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
+ "dev": true,
+ "dependencies": {
+ "buffer-from": "^1.0.0",
+ "source-map": "^0.6.0"
+ }
+ },
+ "node_modules/tsx": {
+ "version": "4.1.2",
+ "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.1.2.tgz",
+ "integrity": "sha512-1spM1bFV6MP2s4tO4tDC7g52fsaFdtEWdO4GfGdqi20qUgPbnAJqixOyIAvCSx1DDj3YIUB4CD06owTWUsOAuQ==",
+ "dev": true,
+ "dependencies": {
+ "esbuild": "~0.18.20",
+ "get-tsconfig": "^4.7.2",
+ "source-map-support": "^0.5.21"
+ },
+ "bin": {
+ "tsx": "dist/cli.mjs"
+ },
+ "engines": {
+ "node": ">=18.0.0"
+ },
+ "optionalDependencies": {
+ "fsevents": "~2.3.3"
+ }
+ },
+ "node_modules/typescript": {
+ "version": "5.2.2",
+ "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz",
+ "integrity": "sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==",
+ "dev": true,
+ "bin": {
+ "tsc": "bin/tsc",
+ "tsserver": "bin/tsserver"
+ },
+ "engines": {
+ "node": ">=14.17"
+ }
+ },
+ "node_modules/undici-types": {
+ "version": "5.26.5",
+ "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
+ "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="
+ }
+ }
+}
diff --git a/ollama/examples/typescript-functioncalling/package.json b/ollama/examples/typescript-functioncalling/package.json
new file mode 100755
index 0000000000000000000000000000000000000000..4e8fff8cf2a24b95375eb7093d9e6999238cf592
--- /dev/null
+++ b/ollama/examples/typescript-functioncalling/package.json
@@ -0,0 +1,9 @@
+{
+ "dependencies": {
+ "ollama-node": "^0.1.27"
+ },
+ "devDependencies": {
+ "tsx": "^4.1.2",
+ "typescript": "^5.2.2"
+ }
+}
diff --git a/ollama/examples/typescript-functioncalling/readme.md b/ollama/examples/typescript-functioncalling/readme.md
new file mode 100755
index 0000000000000000000000000000000000000000..d29379a035ea506d5ed27f4e714c937c2d1c77b5
--- /dev/null
+++ b/ollama/examples/typescript-functioncalling/readme.md
@@ -0,0 +1,28 @@
+# Function calling
+
+
+
+One of the features added to some models is 'function calling'. It's a bit of a confusing name. It's understandable if you think that means the model can call functions, but that's not what it means. Function calling simply means that the output of the model is formatted in JSON, using a preconfigured schema, and uses the expected types. Then your code can use the output of the model and call functions with it. Using the JSON format in Ollama, you can use any model for function calling.
+
+The two examples provided can extract information out of the provided texts. The first example uses the first couple of chapters from War and Peace by Lev Nikolayevich Tolstoy, and extracts the names and titles of the characters introduced in the story. The second example uses a more complicated schema to pull out addresses and event information from a series of emails.
+
+## Running the examples
+
+1. Clone this repo and navigate to the `examples/typescript-functioncalling` directory.
+2. Install the dependencies with `npm install`.
+3. Review the `wp.txt` file.
+4. Run `tsx extractwp.ts`.
+5. Review the `info.txt` file.
+6. Run `tsx extractemail.ts`.
+
+## Review the Code
+
+Both examples do roughly the same thing with different source material. They both use the same system prompt, which tells the model to expect some instructions and a schema. Then we inject the schema into the prompt and generate an answer.
+
+The first example, `extractwp.ts`, outputs the resulting JSON to the console, listing the characters introduced at the start of War and Peace. The second example, `extractemail.ts`, is a bit more complicated, extracting two different types of information: addresses and events. It outputs the results to a JSON blob, then the addresses are handed off to one function called `reportAddresses` and the events are handed off to another function called `reportEvents`.
+
+Notice that both examples are using the model from Intel called `neural-chat`. This is not a model tuned for function calling, yet it performs very well at this task.
+
+## Next Steps
+
+Try exporting some of your real emails to the input file and seeing how well the model does. Try pointing the first example at other books. You could even have it cycle through all the sections and maybe add up the number of times any character is seen throughout the book, determining the most important characters. You can also try out different models.
diff --git a/ollama/examples/typescript-functioncalling/wp.txt b/ollama/examples/typescript-functioncalling/wp.txt
new file mode 100755
index 0000000000000000000000000000000000000000..0ef848674062091ab39ab1efc5e74df5bb4d234e
--- /dev/null
+++ b/ollama/examples/typescript-functioncalling/wp.txt
@@ -0,0 +1,183 @@
+"Well, Prince, so Genoa and Lucca are now just family estates of the Buonapartes. But I warn you, if you don't tell me that this means war, if you still try to defend the infamies and horrors perpetrated by that Antichrist - I really believe he is Antichrist - I will have nothing more to do with you and you are no longer my friend, no longer my 'faithful slave,' as you call yourself! But how do you do? I see I have frightened you - sit down and tell me all the news."
+
+It was in July, 1805, and the speaker was the well-known Anna Pavlovna Scherer, maid of honor and favorite of the Empress Marya Fedorovna. With these words she greeted Prince Vasili Kuragin, a man of high rank and importance, who was the first to arrive at her reception. Anna Pavlovna had had a cough for some days. She was, as she said, suffering from la grippe; grippe being then a new word in St. Petersburg, used only by the elite.
+
+All her invitations without exception, written in French, and delivered by a scarlet-liveried footman that morning, ran as follows:
+
+"If you have nothing better to do, Count (or Prince), and if the prospect of spending an evening with a poor invalid is not too terrible, I shall be very charmed to see you tonight between 7 and 10 - Annette Scherer."
+
+"Heavens! what a virulent attack!" replied the prince, not in the least disconcerted by this reception. He had just entered, wearing an embroidered court uniform, knee breeches, and shoes, and had stars on his breast and a serene expression on his flat face. He spoke in that refined French in which our grandfathers not only spoke but thought, and with the gentle, patronizing intonation natural to a man of importance who had grown old in society and at court. He went up to Anna Pavlovna, kissed her hand, presenting to her his bald, scented, and shining head, and complacently seated himself on the sofa.
+
+"First of all, dear friend, tell me how you are. Set your friend's mind at rest," said he without altering his tone, beneath the politeness and affected sympathy of which indifference and even irony could be discerned.
+
+"Can one be well while suffering morally? Can one be calm in times like these if one has any feeling?" said Anna Pavlovna. "You are staying the whole evening, I hope?"
+
+"And the fete at the English ambassador's? Today is Wednesday. I must put in an appearance there," said the prince. "My daughter is coming for me to take me there."
+
+"I thought today's fete had been canceled. I confess all these festivities and fireworks are becoming wearisome."
+
+"If they had known that you wished it, the entertainment would have been put off," said the prince, who, like a wound-up clock, by force of habit said things he did not even wish to be believed.
+
+"Don't tease! Well, and what has been decided about Novosiltsev's dispatch? You know everything."
+
+"What can one say about it?" replied the prince in a cold, listless tone. "What has been decided? They have decided that Buonaparte has burnt his boats, and I believe that we are ready to burn ours."
+
+Prince Vasili always spoke languidly, like an actor repeating a stale part. Anna Pavlovna Scherer on the contrary, despite her forty years, overflowed with animation and impulsiveness. To be an enthusiast had become her social vocation and, sometimes even when she did not feel like it, she became enthusiastic in order not to disappoint the expectations of those who knew her. The subdued smile which, though it did not suit her faded features, always played round her lips expressed, as in a spoiled child, a continual consciousness of her charming defect, which she neither wished, nor could, nor considered it necessary, to correct.
+
+In the midst of a conversation on political matters Anna Pavlovna burst out:
+
+"Oh, don't speak to me of Austria. Perhaps I don't understand things, but Austria never has wished, and does not wish, for war. She is betraying us! Russia alone must save Europe. Our gracious sovereign recognizes his high vocation and will be true to it. That is the one thing I have faith in! Our good and wonderful sovereign has to perform the noblest role on earth, and he is so virtuous and noble that God will not forsake him. He will fulfill his vocation and crush the hydra of revolution, which has become more terrible than ever in the person of this murderer and villain! We alone must avenge the blood of the just one.... Whom, I ask you, can we rely on?... England with her commercial spirit will not and cannot understand the Emperor Alexander's loftiness of soul. She has refused to evacuate Malta. She wanted to find, and still seeks, some secret motive in our actions. What answer did Novosiltsev get? None. The English have not understood and cannot understand the self-abnegation of our Emperor who wants nothing for himself, but only desires the good of mankind. And what have they promised? Nothing! And what little they have promised they will not perform! Prussia has always declared that Buonaparte is invincible, and that all Europe is powerless before him.... And I don't believe a word that Hardenburg says, or Haugwitz either. This famous Prussian neutrality is just a trap. I have faith only in God and the lofty destiny of our adored monarch. He will save Europe!"
+
+She suddenly paused, smiling at her own impetuosity.
+
+"I think," said the prince with a smile, "that if you had been sent instead of our dear Wintzingerode you would have captured the King of Prussia's consent by assault. You are so eloquent. Will you give me a cup of tea?"
+
+"In a moment. A propos," she added, becoming calm again, "I am expecting two very interesting men tonight, le Vicomte de Mortemart, who is connected with the Montmorencys through the Rohans, one of the best French families. He is one of the genuine emigres, the good ones. And also the Abbe Morio. Do you know that profound thinker? He has been received by the Emperor. Had you heard?"
+
+"I shall be delighted to meet them," said the prince. "But tell me," he added with studied carelessness as if it had only just occurred to him, though the question he was about to ask was the chief motive of his visit, "is it true that the Dowager Empress wants Baron Funke to be appointed first secretary at Vienna? The baron by all accounts is a poor creature."
+
+Prince Vasili wished to obtain this post for his son, but others were trying through the Dowager Empress Marya Fedorovna to secure it for the baron.
+
+Anna Pavlovna almost closed her eyes to indicate that neither she nor anyone else had a right to criticize what the Empress desired or was pleased with.
+
+"Baron Funke has been recommended to the Dowager Empress by her sister," was all she said, in a dry and mournful tone.
+
+As she named the Empress, Anna Pavlovna's face suddenly assumed an expression of profound and sincere devotion and respect mingled with sadness, and this occurred every time she mentioned her illustrious patroness. She added that Her Majesty had deigned to show Baron Funke beaucoup d'estime, and again her face clouded over with sadness.
+
+The prince was silent and looked indifferent. But, with the womanly and courtierlike quickness and tact habitual to her, Anna Pavlovna wished both to rebuke him (for daring to speak as he had done of a man recommended to the Empress) and at the same time to console him, so she said:
+
+"Now about your family. Do you know that since your daughter came out everyone has been enraptured by her? They say she is amazingly beautiful."
+
+The prince bowed to signify his respect and gratitude.
+
+"I often think," she continued after a short pause, drawing nearer to the prince and smiling amiably at him as if to show that political and social topics were ended and the time had come for intimate conversation - "I often think how unfairly sometimes the joys of life are distributed. Why has fate given you two such splendid children? I don't speak of Anatole, your youngest. I don't like him," she added in a tone admitting of no rejoinder and raising her eyebrows. "Two such charming children. And really you appreciate them less than anyone, and so you don't deserve to have them."
+
+And she smiled her ecstatic smile.
+
+"I can't help it," said the prince. "Lavater would have said I lack the bump of paternity."
+
+"Don't joke; I mean to have a serious talk with you. Do you know I am dissatisfied with your younger son? Between ourselves" (and her face assumed its melancholy expression), "he was mentioned at Her Majesty's and you were pitied...."
+
+The prince answered nothing, but she looked at him significantly, awaiting a reply. He frowned.
+
+"What would you have me do?" he said at last. "You know I did all a father could for their education, and they have both turned out fools. Hippolyte is at least a quiet fool, but Anatole is an active one. That is the only difference between them." He said this smiling in a way more natural and animated than usual, so that the wrinkles round his mouth very clearly revealed something unexpectedly coarse and unpleasant.
+
+"And why are children born to such men as you? If you were not a father there would be nothing I could reproach you with," said Anna Pavlovna, looking up pensively.
+
+"I am your faithful slave and to you alone I can confess that my children are the bane of my life. It is the cross I have to bear. That is how I explain it to myself. It can't be helped!"
+
+He said no more, but expressed his resignation to cruel fate by a gesture. Anna Pavlovna meditated.
+
+"Have you never thought of marrying your prodigal son Anatole?" she asked. "They say old maids have a mania for matchmaking, and though I don't feel that weakness in myself as yet, I know a little person who is very unhappy with her father. She is a relation of yours, Princess Mary Bolkonskaya."
+
+Prince Vasili did not reply, though, with the quickness of memory and perception befitting a man of the world, he indicated by a movement of the head that he was considering this information.
+
+"Do you know," he said at last, evidently unable to check the sad current of his thoughts, "that Anatole is costing me forty thousand rubles a year? And," he went on after a pause, "what will it be in five years, if he goes on like this?" Presently he added: "That's what we fathers have to put up with.... Is this princess of yours rich?"
+
+"Her father is very rich and stingy. He lives in the country. He is the well-known Prince Bolkonski who had to retire from the army under the late Emperor, and was nicknamed 'the King of Prussia.' He is very clever but eccentric, and a bore. The poor girl is very unhappy. She has a brother; I think you know him, he married Lise Meinen lately. He is an aide-de-camp of Kutuzov's and will be here tonight."
+
+"Listen, dear Annette," said the prince, suddenly taking Anna Pavlovna's hand and for some reason drawing it downwards. "Arrange that affair for me and I shall always be your most devoted slave-slafe with an f, as a village elder of mine writes in his reports. She is rich and of good family and that's all I want."
+
+And with the familiarity and easy grace peculiar to him, he raised the maid of honor's hand to his lips, kissed it, and swung it to and fro as he lay back in his armchair, looking in another direction.
+
+"Attendez," said Anna Pavlovna, reflecting, "I'll speak to Lise, young Bolkonski's wife, this very evening, and perhaps the thing can be arranged. It shall be on your family's behalf that I'll start my apprenticeship as old maid."
+
+Anna Pavlovna's drawing room was gradually filling. The highest Petersburg society was assembled there: people differing widely in age and character but alike in the social circle to which they belonged. Prince Vasili's daughter, the beautiful Helene, came to take her father to the ambassador's entertainment; she wore a ball dress and her badge as maid of honor. The youthful little Princess Bolkonskaya, known as la femme la plus seduisante de Petersbourg, * was also there. She had been married during the previous winter, and being pregnant did not go to any large gatherings, but only to small receptions. Prince Vasili's son, Hippolyte, had come with Mortemart, whom he introduced. The Abbe Morio and many others had also come.
+
+* The most fascinating woman in Petersburg.
+
+To each new arrival Anna Pavlovna said, "You have not yet seen my aunt," or "You do not know my aunt?" and very gravely conducted him or her to a little old lady, wearing large bows of ribbon in her cap, who had come sailing in from another room as soon as the guests began to arrive; and slowly turning her eyes from the visitor to her aunt, Anna Pavlovna mentioned each one's name and then left them.
+
+Each visitor performed the ceremony of greeting this old aunt whom not one of them knew, not one of them wanted to know, and not one of them cared about; Anna Pavlovna observed these greetings with mournful and solemn interest and silent approval. The aunt spoke to each of them in the same words, about their health and her own, and the health of Her Majesty, "who, thank God, was better today." And each visitor, though politeness prevented his showing impatience, left the old woman with a sense of relief at having performed a vexatious duty and did not return to her the whole evening.
+
+The young Princess Bolkonskaya had brought some work in a gold-embroidered velvet bag. Her pretty little upper lip, on which a delicate dark down was just perceptible, was too short for her teeth, but it lifted all the more sweetly, and was especially charming when she occasionally drew it down to meet the lower lip. As is always the case with a thoroughly attractive woman, her defect - the shortness of her upper lip and her half-open mouth - seemed to be her own special and peculiar form of beauty. Everyone brightened at the sight of this pretty young woman, so soon to become a mother, so full of life and health, and carrying her burden so lightly. Old men and dull dispirited young ones who looked at her, after being in her company and talking to her a little while, felt as if they too were becoming, like her, full of life and health. All who talked to her, and at each word saw her bright smile and the constant gleam of her white teeth, thought that they were in a specially amiable mood that day.
+
+The little princess went round the table with quick, short, swaying steps, her workbag on her arm, and gaily spreading out her dress sat down on a sofa near the silver samovar, as if all she was doing was a pleasure to herself and to all around her. "I have brought my work," said she in French, displaying her bag and addressing all present. "Mind, Annette, I hope you have not played a wicked trick on me," she added, turning to her hostess. "You wrote that it was to be quite a small reception, and just see how badly I am dressed." And she spread out her arms to show her short-waisted, lace-trimmed, dainty gray dress, girdled with a broad ribbon just below the breast.
+
+"Soyez tranquille, Lise, you will always be prettier than anyone else," replied Anna Pavlovna.
+
+"You know," said the princess in the same tone of voice and still in French, turning to a general, "my husband is deserting me? He is going to get himself killed. Tell me what this wretched war is for?" she added, addressing Prince Vasili, and without waiting for an answer she turned to speak to his daughter, the beautiful Helene.
+
+"What a delightful woman this little princess is!" said Prince Vasili to Anna Pavlovna.
+
+One of the next arrivals was a stout, heavily built young man with close-cropped hair, spectacles, the light-colored breeches fashionable at that time, a very high ruffle, and a brown dress coat. This stout young man was an illegitimate son of Count Bezukhov, a well-known grandee of Catherine's time who now lay dying in Moscow. The young man had not yet entered either the military or civil service, as he had only just returned from abroad where he had been educated, and this was his first appearance in society. Anna Pavlovna greeted him with the nod she accorded to the lowest hierarchy in her drawing room. But in spite of this lowest-grade greeting, a look of anxiety and fear, as at the sight of something too large and unsuited to the place, came over her face when she saw Pierre enter. Though he was certainly rather bigger than the other men in the room, her anxiety could only have reference to the clever though shy, but observant and natural, expression which distinguished him from everyone else in that drawing room.
+
+"It is very good of you, Monsieur Pierre, to come and visit a poor invalid," said Anna Pavlovna, exchanging an alarmed glance with her aunt as she conducted him to her.
+
+Pierre murmured something unintelligible, and continued to look round as if in search of something. On his way to the aunt he bowed to the little princess with a pleased smile, as to an intimate acquaintance.
+
+Anna Pavlovna's alarm was justified, for Pierre turned away from the aunt without waiting to hear her speech about Her Majesty's health. Anna Pavlovna in dismay detained him with the words: "Do you know the Abbe Morio? He is a most interesting man."
+
+"Yes, I have heard of his scheme for perpetual peace, and it is very interesting but hardly feasible."
+
+"You think so?" rejoined Anna Pavlovna in order to say something and get away to attend to her duties as hostess. But Pierre now committed a reverse act of impoliteness. First he had left a lady before she had finished speaking to him, and now he continued to speak to another who wished to get away. With his head bent, and his big feet spread apart, he began explaining his reasons for thinking the abbe's plan chimerical.
+
+"We will talk of it later," said Anna Pavlovna with a smile.
+
+And having got rid of this young man who did not know how to behave, she resumed her duties as hostess and continued to listen and watch, ready to help at any point where the conversation might happen to flag. As the foreman of a spinning mill, when he has set the hands to work, goes round and notices here a spindle that has stopped or there one that creaks or makes more noise than it should, and hastens to check the machine or set it in proper motion, so Anna Pavlovna moved about her drawing room, approaching now a silent, now a too-noisy group, and by a word or slight rearrangement kept the conversational machine in steady, proper, and regular motion. But amid these cares her anxiety about Pierre was evident. She kept an anxious watch on him when he approached the group round Mortemart to listen to what was being said there, and again when he passed to another group whose center was the abbe.
+
+Pierre had been educated abroad, and this reception at Anna Pavlovna's was the first he had attended in Russia. He knew that all the intellectual lights of Petersburg were gathered there and, like a child in a toyshop, did not know which way to look, afraid of missing any clever conversation that was to be heard. Seeing the self-confident and refined expression on the faces of those present he was always expecting to hear something very profound. At last he came up to Morio. Here the conversation seemed interesting and he stood waiting for an opportunity to express his own views, as young people are fond of doing.
+
+CHAPTER III
+Anna Pavlovna's reception was in full swing. The spindles hummed steadily and ceaselessly on all sides. With the exception of the aunt, beside whom sat only one elderly lady, who with her thin careworn face was rather out of place in this brilliant society, the whole company had settled into three groups. One, chiefly masculine, had formed round the abbe. Another, of young people, was grouped round the beautiful Princess Helene, Prince Vasili's daughter, and the little Princess Bolkonskaya, very pretty and rosy, though rather too plump for her age. The third group was gathered round Mortemart and Anna Pavlovna.
+
+The vicomte was a nice-looking young man with soft features and polished manners, who evidently considered himself a celebrity but out of politeness modestly placed himself at the disposal of the circle in which he found himself. Anna Pavlovna was obviously serving him up as a treat to her guests. As a clever maitre d'hotel serves up as a specially choice delicacy a piece of meat that no one who had seen it in the kitchen would have cared to eat, so Anna Pavlovna served up to her guests, first the vicomte and then the abbe, as peculiarly choice morsels. The group about Mortemart immediately began discussing the murder of the Duc d'Enghien. The vicomte said that the Duc d'Enghien had perished by his own magnanimity, and that there were particular reasons for Buonaparte's hatred of him.
+
+"Ah, yes! Do tell us all about it, Vicomte," said Anna Pavlovna, with a pleasant feeling that there was something A la Louis XV in the sound of that sentence: "Contez nous cela, Vicomte."
+
+The vicomte bowed and smiled courteously in token of his willingness to comply. Anna Pavlovna arranged a group round him, inviting everyone to listen to his tale.
+
+"The vicomte knew the duc personally," whispered Anna Pavlovna to one of the guests. "The vicomte is a wonderful raconteur," said she to another. "How evidently he belongs to the best society," said she to a third; and the vicomte was served up to the company in the choicest and most advantageous style, like a well-garnished joint of roast beef on a hot dish.
+
+The vicomte wished to begin his story and gave a subtle smile.
+
+"Come over here, Helene, dear," said Anna Pavlovna to the beautiful young princess who was sitting some way off, the center of another group.
+
+The princess smiled. She rose with the same unchanging smile with which she had first entered the room - the smile of a perfectly beautiful woman. With a slight rustle of her white dress trimmed with moss and ivy, with a gleam of white shoulders, glossy hair, and sparkling diamonds, she passed between the men who made way for her, not looking at any of them but smiling on all, as if graciously allowing each the privilege of admiring her beautiful figure and shapely shoulders, back, and bosom - which in the fashion of those days were very much exposed - and she seemed to bring the glamour of a ballroom with her as she moved toward Anna Pavlovna. Helene was so lovely that not only did she not show any trace of coquetry, but on the contrary she even appeared shy of her unquestionable and all too victorious beauty. She seemed to wish, but to be unable, to diminish its effect.
+
+"How lovely!" said everyone who saw her; and the vicomte lifted his shoulders and dropped his eyes as if startled by something extraordinary when she took her seat opposite and beamed upon him also with her unchanging smile.
+
+"Madame, I doubt my ability before such an audience," said he, smilingly inclining his head.
+
+The princess rested her bare round arm on a little table and considered a reply unnecessary. She smilingly waited. All the time the story was being told she sat upright, glancing now at her beautiful round arm, altered in shape by its pressure on the table, now at her still more beautiful bosom, on which she readjusted a diamond necklace. From time to time she smoothed the folds of her dress, and whenever the story produced an effect she glanced at Anna Pavlovna, at once adopted just the expression she saw on the maid of honor's face, and again relapsed into her radiant smile.
+
+The little princess had also left the tea table and followed Helene.
+
+"Wait a moment, I'll get my work.... Now then, what are you thinking of?" she went on, turning to Prince Hippolyte. "Fetch me my workbag."
+
+There was a general movement as the princess, smiling and talking merrily to everyone at once, sat down and gaily arranged herself in her seat.
+
+"Now I am all right," she said, and asking the vicomte to begin, she took up her work.
+
+Prince Hippolyte, having brought the workbag, joined the circle and moving a chair close to hers seated himself beside her.
+
+Le charmant Hippolyte was surprising by his extraordinary resemblance to his beautiful sister, but yet more by the fact that in spite of this resemblance he was exceedingly ugly. His features were like his sister's, but while in her case everything was lit up by a joyous, self-satisfied, youthful, and constant smile of animation, and by the wonderful classic beauty of her figure, his face on the contrary was dulled by imbecility and a constant expression of sullen self-confidence, while his body was thin and weak. His eyes, nose, and mouth all seemed puckered into a vacant, wearied grimace, and his arms and legs always fell into unnatural positions.
+
+"It's not going to be a ghost story?" said he, sitting down beside the princess and hastily adjusting his lorgnette, as if without this instrument he could not begin to speak.
+
+"Why no, my dear fellow," said the astonished narrator, shrugging his shoulders.
+
+"Because I hate ghost stories," said Prince Hippolyte in a tone which showed that he only understood the meaning of his words after he had uttered them.
+
+He spoke with such self-confidence that his hearers could not be sure whether what he said was very witty or very stupid. He was dressed in a dark-green dress coat, knee breeches of the color of cuisse de nymphe effrayee, as he called it, shoes, and silk stockings.
+
+The vicomte told his tale very neatly. It was an anecdote, then current, to the effect that the Duc d'Enghien had gone secretly to Paris to visit Mademoiselle George; that at her house he came upon Bonaparte, who also enjoyed the famous actress' favors, and that in his presence Napoleon happened to fall into one of the fainting fits to which he was subject, and was thus at the duc's mercy. The latter spared him, and this magnanimity Bonaparte subsequently repaid by death.
+
+The story was very pretty and interesting, especially at the point where the rivals suddenly recognized one another; and the ladies looked agitated.
+
+"Charming!" said Anna Pavlovna with an inquiring glance at the little princess.
+
+"Charming!" whispered the little princess, sticking the needle into her work as if to testify that the interest and fascination of the story prevented her from going on with it.
+
+The vicomte appreciated this silent praise and smiling gratefully prepared to continue, but just then Anna Pavlovna, who had kept a watchful eye on the young man who so alarmed her, noticed that he was talking too loudly and vehemently with the abbe, so she hurried to the rescue. Pierre had managed to start a conversation with the abbe about the balance of power, and the latter, evidently interested by the young man's simple-minded eagerness, was explaining his pet theory. Both were talking and listening too eagerly and too naturally, which was why Anna Pavlovna disapproved.
+
+"The means are ... the balance of power in Europe and the rights of the people," the abbe was saying. "It is only necessary for one powerful nation like Russia - barbaric as she is said to be - to place herself disinterestedly at the head of an alliance having for its object the maintenance of the balance of power of Europe, and it would save the world!"
+
+"But how are you to get that balance?" Pierre was beginning.
+
+At that moment Anna Pavlovna came up and, looking severely at Pierre, asked the Italian how he stood Russian climate. The Italian's face instantly changed and assumed an offensively affected, sugary expression, evidently habitual to him when conversing with women.
+
+"I am so enchanted by the brilliancy of the wit and culture of the society, more especially of the feminine society, in which I have had the honor of being received, that I have not yet had time to think of the climate," said he.
+
+Not letting the abbe and Pierre escape, Anna Pavlovna, the more conveniently to keep them under observation, brought them into the larger circle.
+
diff --git a/ollama/examples/typescript-mentors/.gitignore b/ollama/examples/typescript-mentors/.gitignore
new file mode 100755
index 0000000000000000000000000000000000000000..d5f19d89b308d36ee9e27b912015b0f75ec2f03f
--- /dev/null
+++ b/ollama/examples/typescript-mentors/.gitignore
@@ -0,0 +1,2 @@
+node_modules
+package-lock.json
diff --git a/ollama/examples/typescript-mentors/README.md b/ollama/examples/typescript-mentors/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..d3611a5e8543651bf22a0defacbc728cf0fbe7b5
--- /dev/null
+++ b/ollama/examples/typescript-mentors/README.md
@@ -0,0 +1,65 @@
+# Ask the Mentors
+
+This example demonstrates how one would create a set of 'mentors' you can have a conversation with. The mentors are generated using the `character-generator.ts` file. This will use **Stable Beluga 70b** to create a bio and list of verbal ticks and common phrases used by each person. Then `mentors.ts` will take a question, and choose three of the 'mentors' and start a conversation with them. Occasionally, they will talk to each other, and other times they will just deliver a set of monologues. It's fun to see what they do and say.
+
+## Usage
+
+1. Add llama3 to have the mentors ask your questions:
+
+ ```bash
+ ollama pull llama3
+ ```
+
+2. Install prerequisites:
+
+ ```bash
+ npm install
+ ```
+
+3. Ask a question:
+
+ ```bash
+ npm start "what is a jackalope"
+ ```
+
+You can also add your own character to be chosen at random when you ask a question.
+
+1. Make sure you have the right model installed:
+
+ ```bash
+ ollama pull stablebeluga2:70b-q4_K_M
+ ```
+
+2. Create a new character:
+
+ ```bash
+ npm run charactergen "Lorne Greene"
+ ```
+
+ You can choose any well-known person you like. This example will create `lornegreene/Modelfile`.
+
+3. Now you can create a model with this command:
+
+ ```bash
+ ollama create /lornegreene -f lornegreene/Modelfile
+ ```
+
+ `username` is whatever name you set up when you signed up at [https://ollama.com/signup](https://ollama.com/signup).
+
+4. To add this to your mentors, you will have to update the code as follows. On line 8 of `mentors.ts`, add an object to the array, replacing `` with the username you used above.
+
+ ```bash
+ {ns: "", char: "Lorne Greene"}
+ ```
+
+## Review the Code
+
+There are two scripts you can run in this example. The first is the main script to ask the mentors a question. The other one lets you generate a character to add to the mentors. Both scripts are mostly about adjusting the prompts at each inference stage.
+
+### mentors.ts
+
+In the **main** function, it starts by generating a list of mentors. This chooses 3 from a list of interesting characters. Then we ask for a question, and then things get interesting. We set the prompt for each of the 3 mentors a little differently. And the 2nd and 3rd mentors see what the previous folks said. The other functions in mentors sets the prompts for each mentor.
+
+### character-generator.ts
+
+**Character Generator** simply customizes the prompt to build a character profile for any famous person. And most of the script is just tweaking the prompt. This uses Stable Beluga 2 70b parameters. The 70b models tend to do better writing a bio about a character than smaller models, and Stable Beluga seemed to do better than Llama 2. Since this is used at development time for the characters, it doesn't affect the runtime of asking the mentors for their input.
diff --git a/ollama/examples/typescript-mentors/character-generator.ts b/ollama/examples/typescript-mentors/character-generator.ts
new file mode 100755
index 0000000000000000000000000000000000000000..dc5d2f5eaf77ce4241ed2e45f6c2be0832054c02
--- /dev/null
+++ b/ollama/examples/typescript-mentors/character-generator.ts
@@ -0,0 +1,26 @@
+import { Ollama } from 'ollama-node'
+import fs from 'fs';
+import path from 'path';
+
+async function characterGenerator() {
+ const character = process.argv[2];
+ console.log(`You are creating a character for ${character}.`);
+ const foldername = character.replace(/\s/g, '').toLowerCase();
+ const directory = path.join(__dirname, foldername);
+ if (!fs.existsSync(directory)) {
+ fs.mkdirSync(directory, { recursive: true });
+ }
+
+ const ollama = new Ollama();
+ ollama.setModel("stablebeluga2:70b-q4_K_M");
+ const bio = await ollama.generate(`create a bio of ${character} in a single long paragraph. Instead of saying '${character} is...' or '${character} was...' use language like 'You are...' or 'You were...'. Then create a paragraph describing the speaking mannerisms and style of ${character}. Don't include anything about how ${character} looked or what they sounded like, just focus on the words they said. Instead of saying '${character} would say...' use language like 'You should say...'. If you use quotes, always use single quotes instead of double quotes. If there are any specific words or phrases you used a lot, show how you used them. `);
+
+ const thecontents = `FROM llama3\nSYSTEM """\n${bio.response.replace(/(\r\n|\n|\r)/gm, " ").replace('would', 'should')} All answers to questions should be related back to what you are most known for.\n"""`;
+
+ fs.writeFile(path.join(directory, 'Modelfile'), thecontents, (err: any) => {
+ if (err) throw err;
+ console.log('The file has been saved!');
+ });
+}
+
+characterGenerator();
diff --git a/ollama/examples/typescript-mentors/mentors.ts b/ollama/examples/typescript-mentors/mentors.ts
new file mode 100755
index 0000000000000000000000000000000000000000..17d70476867806faace60e285fc11f93d63cd2f4
--- /dev/null
+++ b/ollama/examples/typescript-mentors/mentors.ts
@@ -0,0 +1,60 @@
+import { Ollama } from 'ollama-node';
+
+const mentorCount = 3;
+const ollama = new Ollama();
+type Mentor = { ns: string, char: string };
+
+function getMentors(): Mentor[] {
+ const mentors = [{ ns: 'mattw', char: 'Gary Vaynerchuk' }, { ns: 'mattw', char: 'Kanye West'}, {ns: 'mattw', char: 'Martha Stewart'}, {ns: 'mattw', char: 'Neil deGrasse Tyson'}, {ns: 'mattw', char: 'Owen Wilson'}, {ns: 'mattw', char: 'Ronald Reagan'}, {ns: 'mattw', char: 'Donald Trump'}, {ns: 'mattw', char: 'Barack Obama'}, {ns: 'mattw', char: 'Jeff Bezos'}];
+ const chosenMentors: Mentor[] = [];
+ for (let i = 0; i < mentorCount; i++) {
+ const mentor = mentors[Math.floor(Math.random() * mentors.length)];
+ chosenMentors.push(mentor);
+ mentors.splice(mentors.indexOf(mentor), 1);
+ }
+ return chosenMentors;
+}
+
+function getMentorFileName(mentor: Mentor): string {
+ const model = mentor.char.toLowerCase().replace(/\s/g, '');
+ return `${mentor.ns}/${model}`;
+}
+
+async function getSystemPrompt(mentor: Mentor, isLast: boolean, question: string): Promise {
+ ollama.setModel(getMentorFileName(mentor));
+ const info = await ollama.showModelInfo()
+ let SystemPrompt = info.system || '';
+ SystemPrompt += ` You should continue the conversation as if you were ${mentor} and acknowledge the people before you in the conversation. You should adopt their mannerisms and tone, but also not use language they wouldn't use. If they are not known to know about the concept in the question, don't offer an answer. Your answer should be no longer than 1 paragraph. And definitely try not to sound like anyone else. Don't repeat any slang or phrases already used. And if it is a question the original ${mentor} wouldn't have know the answer to, just say that you don't know, in the style of ${mentor}. And think about the time the person lived. Don't use terminology that they wouldn't have used.`
+
+ if (isLast) {
+ SystemPrompt += ` End your answer with something like I hope our answers help you out`;
+ } else {
+ SystemPrompt += ` Remember, this is a conversation, so you don't need a conclusion, but end your answer with a question related to the first question: "${question}".`;
+ }
+ return SystemPrompt;
+}
+
+async function main() {
+ const mentors = getMentors();
+ const question = process.argv[2];
+ let theConversation = `Here is the conversation so far.\nYou: ${question}\n`
+
+ for await (const mentor of mentors) {
+ const SystemPrompt = await getSystemPrompt(mentor, mentor === mentors[mentorCount - 1], question);
+ ollama.setModel(getMentorFileName(mentor));
+ ollama.setSystemPrompt(SystemPrompt);
+ let output = '';
+ process.stdout.write(`\n${mentor.char}: `);
+ for await (const chunk of ollama.streamingGenerate(theConversation + `Continue the conversation as if you were ${mentor.char} on the question "${question}".`)) {
+ if (chunk.response) {
+ output += chunk.response;
+ process.stdout.write(chunk.response);
+ } else {
+ process.stdout.write('\n');
+ }
+ }
+ theConversation += `${mentor.char}: ${output}\n\n`
+ }
+}
+
+main();
\ No newline at end of file
diff --git a/ollama/examples/typescript-mentors/package.json b/ollama/examples/typescript-mentors/package.json
new file mode 100755
index 0000000000000000000000000000000000000000..537f3df12c3309542c26e6b27d397fbcb20eeac8
--- /dev/null
+++ b/ollama/examples/typescript-mentors/package.json
@@ -0,0 +1,15 @@
+{
+ "scripts": {
+ "charactergen": "tsx character-generator.ts",
+ "start": "tsx mentors.ts"
+ },
+ "dependencies": {
+ "fs": "^0.0.1-security",
+ "ollama-node": "^0.0.3",
+ "path": "^0.12.7"
+ },
+ "devDependencies": {
+ "tsx": "^4.6.2",
+ "typescript": "^5.3.3"
+ }
+}
diff --git a/ollama/examples/typescript-simplechat/client.ts b/ollama/examples/typescript-simplechat/client.ts
new file mode 100755
index 0000000000000000000000000000000000000000..8ad113b12c0c50dc772e57907614e8d0bbc55828
--- /dev/null
+++ b/ollama/examples/typescript-simplechat/client.ts
@@ -0,0 +1,77 @@
+import * as readline from "readline";
+
+const model = "llama3.1";
+type Message = {
+ role: "assistant" | "user" | "system";
+ content: string;
+}
+const messages: Message[] = [{
+ role: "system",
+ content: "You are a helpful AI agent."
+}]
+
+const rl = readline.createInterface({
+ input: process.stdin,
+ output: process.stdout
+})
+
+async function chat(messages: Message[]): Promise {
+ const body = {
+ model: model,
+ messages: messages
+ }
+
+ const response = await fetch("http://localhost:11434/api/chat", {
+ method: "POST",
+ body: JSON.stringify(body)
+ })
+
+ const reader = response.body?.getReader()
+ if (!reader) {
+ throw new Error("Failed to read response body")
+ }
+ let content = ""
+ while (true) {
+ const { done, value } = await reader.read()
+ if (done) {
+ break;
+ }
+ const rawjson = new TextDecoder().decode(value);
+ const json = JSON.parse(rawjson)
+
+ if (json.done === false) {
+ process.stdout.write(json.message.content);
+ content += json.message.content
+ }
+
+ }
+ return { role: "assistant", content: content };
+}
+
+async function askQuestion(): Promise {
+ return new Promise((resolve) => {
+ rl.question("\n\nAsk a question: (press enter alone to quit)\n\n", async (user_input) => {
+ if (user_input.trim() === "") {
+ rl.close();
+ console.log("Thankyou. Goodbye.\n")
+ console.log("=======\nHere is the message history that was used in this conversation.\n=======\n")
+ messages.forEach(message => {
+ console.log(message)
+ })
+ resolve();
+ } else {
+ console.log();
+ messages.push({ role: "user", content: user_input });
+ messages.push(await chat(messages));
+ await askQuestion(); // Ask the next question
+ }
+ });
+ });
+}
+
+async function main() {
+ await askQuestion();
+
+}
+
+main();
diff --git a/ollama/examples/typescript-simplechat/package.json b/ollama/examples/typescript-simplechat/package.json
new file mode 100755
index 0000000000000000000000000000000000000000..6ae8c1aaeecbb09afd112765c746219da8ddba70
--- /dev/null
+++ b/ollama/examples/typescript-simplechat/package.json
@@ -0,0 +1,12 @@
+{
+ "scripts": {
+ "start": "tsx client.ts"
+ },
+ "dependencies": {
+ "@types/node": "^20.10.4",
+ "prompt-sync": "^4.2.0",
+ "readline": "^1.3.0",
+ "tsx": "^4.6.2",
+ "typescript": "^5.3.3"
+ }
+ }
\ No newline at end of file
diff --git a/ollama/examples/typescript-simplechat/readme.md b/ollama/examples/typescript-simplechat/readme.md
new file mode 100755
index 0000000000000000000000000000000000000000..5635b9d226d69f32446fd49624a20054129c74bd
--- /dev/null
+++ b/ollama/examples/typescript-simplechat/readme.md
@@ -0,0 +1,35 @@
+# Simple Chat Example
+
+The **chat** endpoint, available as of v0.1.14, is one of two ways to generate text from an LLM with Ollama. At a high level, you provide the endpoint an array of message objects with a role and content specified. Then with each output and prompt, you add more messages, which builds up the history.
+
+## Run the Example
+
+`npm start`
+
+## Review the Code
+
+You can see in the **chat** function that is actually calling the endpoint is simply done with:
+
+```typescript
+const body = {
+ model: model,
+ messages: messages
+}
+
+const response = await fetch("http://localhost:11434/api/chat", {
+ method: "POST",
+ body: JSON.stringify(body)
+})
+```
+
+With the **generate** endpoint, you need to provide a `prompt`. But with **chat**, you provide `messages`. And the resulting stream of responses includes a `message` object with a `content` field.
+
+The final JSON object doesn't provide the full content, so you will need to build the content yourself. In this example, **chat** takes the full array of messages and outputs the resulting message from this call of the chat endpoint.
+
+In the **askQuestion** function, we collect `user_input` and add it as a message to our messages, and that is passed to the chat function. When the LLM is done responding, the output is added as another message to the messages array.
+
+At the end, you will see a printout of all the messages.
+
+## Next Steps
+
+In this example, all generations are kept. You might want to experiment with summarizing everything older than 10 conversations to enable longer history with less context being used.
diff --git a/ollama/format/bytes.go b/ollama/format/bytes.go
new file mode 100755
index 0000000000000000000000000000000000000000..13d8575e911847bb8336a0ddc94d2e9809e9f84f
--- /dev/null
+++ b/ollama/format/bytes.go
@@ -0,0 +1,65 @@
+package format
+
+import (
+ "fmt"
+ "math"
+)
+
+const (
+ Byte = 1
+
+ KiloByte = Byte * 1000
+ MegaByte = KiloByte * 1000
+ GigaByte = MegaByte * 1000
+ TeraByte = GigaByte * 1000
+
+ KibiByte = Byte * 1024
+ MebiByte = KibiByte * 1024
+ GibiByte = MebiByte * 1024
+)
+
+func HumanBytes(b int64) string {
+ var value float64
+ var unit string
+
+ switch {
+ case b >= TeraByte:
+ value = float64(b) / TeraByte
+ unit = "TB"
+ case b >= GigaByte:
+ value = float64(b) / GigaByte
+ unit = "GB"
+ case b >= MegaByte:
+ value = float64(b) / MegaByte
+ unit = "MB"
+ case b >= KiloByte:
+ value = float64(b) / KiloByte
+ unit = "KB"
+ default:
+ return fmt.Sprintf("%d B", b)
+ }
+
+ switch {
+ case value >= 100:
+ return fmt.Sprintf("%d %s", int(value), unit)
+ case value >= 10:
+ return fmt.Sprintf("%d %s", int(value), unit)
+ case value != math.Trunc(value):
+ return fmt.Sprintf("%.1f %s", value, unit)
+ default:
+ return fmt.Sprintf("%d %s", int(value), unit)
+ }
+}
+
+func HumanBytes2(b uint64) string {
+ switch {
+ case b >= GibiByte:
+ return fmt.Sprintf("%.1f GiB", float64(b)/GibiByte)
+ case b >= MebiByte:
+ return fmt.Sprintf("%.1f MiB", float64(b)/MebiByte)
+ case b >= KibiByte:
+ return fmt.Sprintf("%.1f KiB", float64(b)/KibiByte)
+ default:
+ return fmt.Sprintf("%d B", b)
+ }
+}
diff --git a/ollama/format/format.go b/ollama/format/format.go
new file mode 100755
index 0000000000000000000000000000000000000000..ac50570dfe4c272feb8b7e52c92addddf4a3c162
--- /dev/null
+++ b/ollama/format/format.go
@@ -0,0 +1,34 @@
+package format
+
+import (
+ "fmt"
+ "math"
+ "strconv"
+)
+
+const (
+ Thousand = 1000
+ Million = Thousand * 1000
+ Billion = Million * 1000
+)
+
+func HumanNumber(b uint64) string {
+ switch {
+ case b >= Billion:
+ number := float64(b) / Billion
+ if number == math.Floor(number) {
+ return fmt.Sprintf("%.0fB", number) // no decimals if whole number
+ }
+ return fmt.Sprintf("%.1fB", number) // one decimal if not a whole number
+ case b >= Million:
+ number := float64(b) / Million
+ if number == math.Floor(number) {
+ return fmt.Sprintf("%.0fM", number) // no decimals if whole number
+ }
+ return fmt.Sprintf("%.2fM", number) // two decimals if not a whole number
+ case b >= Thousand:
+ return fmt.Sprintf("%.0fK", float64(b)/Thousand)
+ default:
+ return strconv.FormatUint(b, 10)
+ }
+}
diff --git a/ollama/format/format_test.go b/ollama/format/format_test.go
new file mode 100755
index 0000000000000000000000000000000000000000..bff32780300ba1eeea4e928b4444ff3d83dba052
--- /dev/null
+++ b/ollama/format/format_test.go
@@ -0,0 +1,33 @@
+package format
+
+import (
+ "testing"
+)
+
+func TestHumanNumber(t *testing.T) {
+ type testCase struct {
+ input uint64
+ expected string
+ }
+
+ testCases := []testCase{
+ {0, "0"},
+ {1000000, "1M"},
+ {125000000, "125M"},
+ {500500000, "500.50M"},
+ {500550000, "500.55M"},
+ {1000000000, "1B"},
+ {2800000000, "2.8B"},
+ {2850000000, "2.9B"},
+ {1000000000000, "1000B"},
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.expected, func(t *testing.T) {
+ result := HumanNumber(tc.input)
+ if result != tc.expected {
+ t.Errorf("Expected %s, got %s", tc.expected, result)
+ }
+ })
+ }
+}
diff --git a/ollama/format/time.go b/ollama/format/time.go
new file mode 100755
index 0000000000000000000000000000000000000000..74062848b85980210c8f5f4c319c01189ea6c858
--- /dev/null
+++ b/ollama/format/time.go
@@ -0,0 +1,70 @@
+package format
+
+import (
+ "fmt"
+ "math"
+ "strings"
+ "time"
+)
+
+// humanDuration returns a human-readable approximation of a
+// duration (eg. "About a minute", "4 hours ago", etc.).
+func humanDuration(d time.Duration) string {
+ seconds := int(d.Seconds())
+
+ switch {
+ case seconds < 1:
+ return "Less than a second"
+ case seconds == 1:
+ return "1 second"
+ case seconds < 60:
+ return fmt.Sprintf("%d seconds", seconds)
+ }
+
+ minutes := int(d.Minutes())
+ switch {
+ case minutes == 1:
+ return "About a minute"
+ case minutes < 60:
+ return fmt.Sprintf("%d minutes", minutes)
+ }
+
+ hours := int(math.Round(d.Hours()))
+ switch {
+ case hours == 1:
+ return "About an hour"
+ case hours < 48:
+ return fmt.Sprintf("%d hours", hours)
+ case hours < 24*7*2:
+ return fmt.Sprintf("%d days", hours/24)
+ case hours < 24*30*2:
+ return fmt.Sprintf("%d weeks", hours/24/7)
+ case hours < 24*365*2:
+ return fmt.Sprintf("%d months", hours/24/30)
+ }
+
+ return fmt.Sprintf("%d years", int(d.Hours())/24/365)
+}
+
+func HumanTime(t time.Time, zeroValue string) string {
+ return humanTime(t, zeroValue)
+}
+
+func HumanTimeLower(t time.Time, zeroValue string) string {
+ return strings.ToLower(humanTime(t, zeroValue))
+}
+
+func humanTime(t time.Time, zeroValue string) string {
+ if t.IsZero() {
+ return zeroValue
+ }
+
+ delta := time.Since(t)
+ if int(delta.Hours())/24/365 < -20 {
+ return "Forever"
+ } else if delta < 0 {
+ return humanDuration(-delta) + " from now"
+ }
+
+ return humanDuration(delta) + " ago"
+}
diff --git a/ollama/format/time_test.go b/ollama/format/time_test.go
new file mode 100755
index 0000000000000000000000000000000000000000..bd0ba9a8f8357ee148e3c7eed913d5b79b392b46
--- /dev/null
+++ b/ollama/format/time_test.go
@@ -0,0 +1,45 @@
+package format
+
+import (
+ "testing"
+ "time"
+)
+
+func assertEqual(t *testing.T, a interface{}, b interface{}) {
+ if a != b {
+ t.Errorf("Assert failed, expected %v, got %v", b, a)
+ }
+}
+
+func TestHumanTime(t *testing.T) {
+ now := time.Now()
+
+ t.Run("zero value", func(t *testing.T) {
+ assertEqual(t, HumanTime(time.Time{}, "never"), "never")
+ })
+
+ t.Run("time in the future", func(t *testing.T) {
+ v := now.Add(48 * time.Hour)
+ assertEqual(t, HumanTime(v, ""), "2 days from now")
+ })
+
+ t.Run("time in the past", func(t *testing.T) {
+ v := now.Add(-48 * time.Hour)
+ assertEqual(t, HumanTime(v, ""), "2 days ago")
+ })
+
+ t.Run("soon", func(t *testing.T) {
+ v := now.Add(800 * time.Millisecond)
+ assertEqual(t, HumanTime(v, ""), "Less than a second from now")
+ })
+
+ t.Run("time way in the future", func(t *testing.T) {
+ v := now.Add(24 * time.Hour * 365 * 200)
+ assertEqual(t, HumanTime(v, ""), "Forever")
+ })
+
+ t.Run("time way in the future lowercase", func(t *testing.T) {
+ v := now.Add(24 * time.Hour * 365 * 200)
+ assertEqual(t, HumanTimeLower(v, ""), "forever")
+ })
+}
diff --git a/ollama/go.mod b/ollama/go.mod
new file mode 100755
index 0000000000000000000000000000000000000000..2e0c6614cf510a2c285677650ac92009216a0d87
--- /dev/null
+++ b/ollama/go.mod
@@ -0,0 +1,78 @@
+module github.com/ollama/ollama
+
+go 1.22.0
+
+require (
+ github.com/containerd/console v1.0.3
+ github.com/emirpasic/gods v1.18.1
+ github.com/gin-gonic/gin v1.10.0
+ github.com/golang/protobuf v1.5.4 // indirect
+ github.com/google/uuid v1.1.2
+ github.com/olekukonko/tablewriter v0.0.5
+ github.com/spf13/cobra v1.7.0
+ github.com/stretchr/testify v1.9.0
+ github.com/x448/float16 v0.8.4
+ golang.org/x/sync v0.3.0
+)
+
+require (
+ github.com/agnivade/levenshtein v1.1.1
+ github.com/d4l3k/go-bfloat16 v0.0.0-20211005043715-690c3bdd05f1
+ github.com/google/go-cmp v0.6.0
+ github.com/mattn/go-runewidth v0.0.14
+ github.com/nlpodyssey/gopickle v0.3.0
+ github.com/pdevine/tensor v0.0.0-20240510204454-f88f4562727c
+)
+
+require (
+ github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40 // indirect
+ github.com/bytedance/sonic/loader v0.1.1 // indirect
+ github.com/chewxy/hm v1.0.0 // indirect
+ github.com/chewxy/math32 v1.10.1 // indirect
+ github.com/cloudwego/base64x v0.1.4 // indirect
+ github.com/cloudwego/iasm v0.2.0 // indirect
+ github.com/davecgh/go-spew v1.1.1 // indirect
+ github.com/gogo/protobuf v1.3.2 // indirect
+ github.com/google/flatbuffers v24.3.25+incompatible // indirect
+ github.com/kr/text v0.2.0 // indirect
+ github.com/pkg/errors v0.9.1 // indirect
+ github.com/pmezard/go-difflib v1.0.0 // indirect
+ github.com/rivo/uniseg v0.2.0 // indirect
+ github.com/xtgo/set v1.0.0 // indirect
+ go4.org/unsafe/assume-no-moving-gc v0.0.0-20231121144256-b99613f794b6 // indirect
+ golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
+ gonum.org/v1/gonum v0.15.0 // indirect
+ gorgonia.org/vecf32 v0.9.0 // indirect
+ gorgonia.org/vecf64 v0.9.0 // indirect
+)
+
+require (
+ github.com/bytedance/sonic v1.11.6 // indirect
+ github.com/gabriel-vasile/mimetype v1.4.3 // indirect
+ github.com/gin-contrib/cors v1.7.2
+ github.com/gin-contrib/sse v0.1.0 // indirect
+ github.com/go-playground/locales v0.14.1 // indirect
+ github.com/go-playground/universal-translator v0.18.1 // indirect
+ github.com/go-playground/validator/v10 v10.20.0 // indirect
+ github.com/goccy/go-json v0.10.2 // indirect
+ github.com/inconshreveable/mousetrap v1.1.0 // indirect
+ github.com/json-iterator/go v1.1.12 // indirect
+ github.com/klauspost/cpuid/v2 v2.2.7 // indirect
+ github.com/leodido/go-urn v1.4.0 // indirect
+ github.com/mattn/go-isatty v0.0.20 // indirect
+ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
+ github.com/modern-go/reflect2 v1.0.2 // indirect
+ github.com/pelletier/go-toml/v2 v2.2.2 // indirect
+ github.com/spf13/pflag v1.0.5 // indirect
+ github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
+ github.com/ugorji/go/codec v1.2.12 // indirect
+ golang.org/x/arch v0.8.0 // indirect
+ golang.org/x/crypto v0.23.0
+ golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa
+ golang.org/x/net v0.25.0 // indirect
+ golang.org/x/sys v0.20.0
+ golang.org/x/term v0.20.0
+ golang.org/x/text v0.15.0
+ google.golang.org/protobuf v1.34.1
+ gopkg.in/yaml.v3 v3.0.1 // indirect
+)
diff --git a/ollama/go.sum b/ollama/go.sum
new file mode 100755
index 0000000000000000000000000000000000000000..926ed26d882b81ac7fff28e32a20701f5b44ac0c
--- /dev/null
+++ b/ollama/go.sum
@@ -0,0 +1,364 @@
+cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
+cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
+dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
+gioui.org v0.0.0-20210308172011-57750fc8a0a6/go.mod h1:RSH6KIUZ0p2xy5zHDxgAM4zumjgTw83q2ge/PI+yyw8=
+github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
+github.com/agnivade/levenshtein v1.1.1 h1:QY8M92nrzkmr798gCo3kmMyqXFzdQVpxLlGPRBij0P8=
+github.com/agnivade/levenshtein v1.1.1/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo=
+github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw=
+github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
+github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40 h1:q4dksr6ICHXqG5hm0ZW5IHyeEJXoIJSOZeBLmWPNeIQ=
+github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40/go.mod h1:Q7yQnSMnLvcXlZ8RV+jwz/6y1rQTqbX6C82SndT52Zs=
+github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
+github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
+github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
+github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0=
+github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4=
+github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM=
+github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
+github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
+github.com/chewxy/hm v1.0.0 h1:zy/TSv3LV2nD3dwUEQL2VhXeoXbb9QkpmdRAVUFiA6k=
+github.com/chewxy/hm v1.0.0/go.mod h1:qg9YI4q6Fkj/whwHR1D+bOGeF7SniIP40VweVepLjg0=
+github.com/chewxy/math32 v1.0.0/go.mod h1:Miac6hA1ohdDUTagnvJy/q+aNnEk16qWUdb8ZVhvCN0=
+github.com/chewxy/math32 v1.10.1 h1:LFpeY0SLJXeaiej/eIp2L40VYfscTvKh/FSEZ68uMkU=
+github.com/chewxy/math32 v1.10.1/go.mod h1:dOB2rcuFrCn6UHrze36WSLVPKtzPMRAQvBvUwkSsLqs=
+github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
+github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
+github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
+github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
+github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
+github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
+github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
+github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/containerd/console v1.0.3 h1:lIr7SlA5PxZyMV30bDW0MGbiOPXwc63yRuCP0ARubLw=
+github.com/containerd/console v1.0.3/go.mod h1:7LqA/THxQ86k76b8c/EMSiaJ3h1eZkMkXar0TQ1gf3U=
+github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
+github.com/d4l3k/go-bfloat16 v0.0.0-20211005043715-690c3bdd05f1 h1:cBzrdJPAFBsgCrDPnZxlp1dF2+k4r1kVpD7+1S1PVjY=
+github.com/d4l3k/go-bfloat16 v0.0.0-20211005043715-690c3bdd05f1/go.mod h1:uw2gLcxEuYUlAd/EXyjc/v55nd3+47YAgWbSXVxPrNI=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48 h1:fRzb/w+pyskVMQ+UbP35JkH8yB7MYb4q/qhBarqZE6g=
+github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA=
+github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
+github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
+github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
+github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
+github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
+github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
+github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
+github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ=
+github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
+github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
+github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
+github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
+github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
+github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
+github.com/gin-contrib/cors v1.7.2 h1:oLDHxdg8W/XDoN/8zamqk/Drgt4oVZDvaV0YmvVICQw=
+github.com/gin-contrib/cors v1.7.2/go.mod h1:SUJVARKgQ40dmrzgXEVxj2m7Ig1v1qIboQkPDTQ9t2E=
+github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
+github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
+github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
+github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
+github.com/go-fonts/dejavu v0.1.0/go.mod h1:4Wt4I4OU2Nq9asgDCteaAaWZOV24E+0/Pwo0gppep4g=
+github.com/go-fonts/latin-modern v0.2.0/go.mod h1:rQVLdDMK+mK1xscDwsqM5J8U2jrRa3T0ecnM9pNujks=
+github.com/go-fonts/liberation v0.1.1/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY=
+github.com/go-fonts/stix v0.1.0/go.mod h1:w/c1f0ldAUlJmLBvlbkvVXLAD+tAMqobIIQpmnUIzUY=
+github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
+github.com/go-latex/latex v0.0.0-20210118124228-b3d85cf34e07/go.mod h1:CO1AlKB2CSIqUrmQPqA0gdRIlnLEY0gK5JGjh37zN5U=
+github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
+github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
+github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
+github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
+github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
+github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
+github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8=
+github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
+github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
+github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
+github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
+github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
+github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
+github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
+github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
+github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
+github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
+github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
+github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
+github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
+github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
+github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
+github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
+github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
+github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
+github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
+github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
+github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
+github.com/golang/snappy v0.0.3 h1:fHPg5GQYlCeLIPB9BZqMVR5nR9A+IM5zcgeTdjMYmLA=
+github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
+github.com/google/flatbuffers v2.0.0+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
+github.com/google/flatbuffers v24.3.25+incompatible h1:CX395cjN9Kke9mmalRoL3d81AtFUxJM+yDthflgJGkI=
+github.com/google/flatbuffers v24.3.25+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
+github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
+github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y=
+github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
+github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
+github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
+github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
+github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
+github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes=
+github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes=
+github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
+github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
+github.com/klauspost/compress v1.13.1 h1:wXr2uRxZTJXHLly6qhJabee5JqIhTRoLBhDOA74hDEQ=
+github.com/klauspost/compress v1.13.1/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg=
+github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
+github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM=
+github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
+github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
+github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
+github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
+github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
+github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
+github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
+github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWVwUuU=
+github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
+github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
+github.com/nlpodyssey/gopickle v0.3.0 h1:BLUE5gxFLyyNOPzlXxt6GoHEMMxD0qhsE4p0CIQyoLw=
+github.com/nlpodyssey/gopickle v0.3.0/go.mod h1:f070HJ/yR+eLi5WmM1OXJEGaTpuJEUiib19olXgYha0=
+github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
+github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
+github.com/pdevine/tensor v0.0.0-20240510204454-f88f4562727c h1:GwiUUjKefgvSNmv3NCvI/BL0kDebW6Xa+kcdpdc1mTY=
+github.com/pdevine/tensor v0.0.0-20240510204454-f88f4562727c/go.mod h1:PSojXDXF7TbgQiD6kkd98IHOS0QqTyUEaWRiS8+BLu8=
+github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
+github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
+github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY=
+github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
+github.com/pierrec/lz4/v4 v4.1.8 h1:ieHkV+i2BRzngO4Wd/3HGowuZStgq6QkPsD1eolNAO4=
+github.com/pierrec/lz4/v4 v4.1.8/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
+github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
+github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
+github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
+github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
+github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
+github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
+github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w=
+github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
+github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
+github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
+github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
+github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
+github.com/stretchr/testify v1.1.4/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
+github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
+github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
+github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
+github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
+github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
+github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
+github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
+github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
+github.com/xtgo/set v1.0.0 h1:6BCNBRv3ORNDQ7fyoJXRv+tstJz3m1JVFQErfeZz2pY=
+github.com/xtgo/set v1.0.0/go.mod h1:d3NHzGzSa0NmB2NhFyECA+QdRp29oEn2xbT+TpeFoM8=
+github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
+go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
+go4.org/unsafe/assume-no-moving-gc v0.0.0-20231121144256-b99613f794b6 h1:lGdhQUN/cnWdSH3291CUuxSEqc+AsGTiDxPP3r2J0l4=
+go4.org/unsafe/assume-no-moving-gc v0.0.0-20231121144256-b99613f794b6/go.mod h1:FftLjUGFEDu5k8lt0ddY+HcrH/qU/0qk+H8j9/nTl3E=
+golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
+golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc=
+golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI=
+golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
+golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20191002040644-a1355ae1e2c3/go.mod h1:NOZ3BPKG0ec/BKJQgnvsSFpcKLM5xXVWnvZS97DWHgE=
+golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa h1:FRnLl4eNAQl8hwxVVC17teOw8kdjVDVAiFMtgUdTSRQ=
+golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa/go.mod h1:zk2irFbV9DP96SEBUUAy67IdHUaZuSnrz1n472HUCLE=
+golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs=
+golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
+golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.0.0-20200119044424-58c23975cae1/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.0.0-20200430140353-33d19683fad8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.0.0-20200618115811-c13761719519/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.0.0-20201208152932-35266b937fa6/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.0.0-20210216034530-4410531fe030/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
+golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
+golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
+golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
+golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
+golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
+golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
+golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
+golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
+golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
+golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
+golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
+golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210304124612-50617c2ba197/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
+golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.20.0 h1:VnkxpohqXaOBYJtBmEppKUG6mXpi+4O6purfc2+sMhw=
+golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk=
+golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
+golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
+golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20190927191325-030b2cf1153e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo=
+gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0=
+gonum.org/v1/gonum v0.9.3/go.mod h1:TZumC3NeyVQskjXqmyWt4S3bINhy7B4eYwW69EbyX+0=
+gonum.org/v1/gonum v0.15.0 h1:2lYxjRbTYyxkJxlhC+LvJIx3SsANPdRybu1tGj9/OrQ=
+gonum.org/v1/gonum v0.15.0/go.mod h1:xzZVBJBtS+Mz4q0Yl2LJTk+OxOg4jiXZ7qBoM0uISGo=
+gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
+gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc=
+gonum.org/v1/plot v0.9.0/go.mod h1:3Pcqqmp6RHvJI72kgb8fThyUnav364FOsdDo2aGW5lY=
+google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
+google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
+google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
+google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
+google.golang.org/genproto v0.0.0-20210630183607-d20f26d13c79/go.mod h1:yiaVoXHpRzHGyxV3o4DktVWY4mSUErTKaeEOq6C3t3U=
+google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
+google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
+google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
+google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
+google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
+google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
+google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
+google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE=
+google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
+google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
+google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
+google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
+google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
+google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
+google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
+google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
+google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
+google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg=
+google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gorgonia.org/vecf32 v0.9.0 h1:PClazic1r+JVJ1dEzRXgeiVl4g1/Hf/w+wUSqnco1Xg=
+gorgonia.org/vecf32 v0.9.0/go.mod h1:NCc+5D2oxddRL11hd+pCB1PEyXWOyiQxfZ/1wwhOXCA=
+gorgonia.org/vecf64 v0.9.0 h1:bgZDP5x0OzBF64PjMGC3EvTdOoMEcmfAh1VCUnZFm1A=
+gorgonia.org/vecf64 v0.9.0/go.mod h1:hp7IOWCnRiVQKON73kkC/AUMtEXyf9kGlVrtPQ9ccVA=
+honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
+rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
diff --git a/ollama/gpu/amd_common.go b/ollama/gpu/amd_common.go
new file mode 100755
index 0000000000000000000000000000000000000000..2839cb7c31fd0bf1cc18bf174a4c907ae70eb9fc
--- /dev/null
+++ b/ollama/gpu/amd_common.go
@@ -0,0 +1,99 @@
+//go:build linux || windows
+
+package gpu
+
+import (
+ "errors"
+ "log/slog"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+)
+
+// Determine if the given ROCm lib directory is usable by checking for existence of some glob patterns
+func rocmLibUsable(libDir string) bool {
+ slog.Debug("evaluating potential rocm lib dir " + libDir)
+ for _, g := range ROCmLibGlobs {
+ res, _ := filepath.Glob(filepath.Join(libDir, g))
+ if len(res) == 0 {
+ return false
+ }
+ }
+ return true
+}
+
+func GetSupportedGFX(libDir string) ([]string, error) {
+ var ret []string
+ files, err := filepath.Glob(filepath.Join(libDir, "rocblas", "library", "TensileLibrary_lazy_gfx*.dat"))
+ if err != nil {
+ return nil, err
+ }
+ for _, file := range files {
+ ret = append(ret, strings.TrimSuffix(strings.TrimPrefix(filepath.Base(file), "TensileLibrary_lazy_"), ".dat"))
+ }
+ return ret, nil
+}
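For orientation, a hypothetical example of the filename-to-target mapping performed above (the .dat names are illustrative, not a guaranteed inventory):

// Given a rocblas payload containing, say:
//   <libDir>/rocblas/library/TensileLibrary_lazy_gfx906.dat
//   <libDir>/rocblas/library/TensileLibrary_lazy_gfx1030.dat
// GetSupportedGFX(libDir) returns []string{"gfx906", "gfx1030"}.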
+
+func rocmGetVisibleDevicesEnv(gpuInfo []GpuInfo) (string, string) {
+ ids := []string{}
+ for _, info := range gpuInfo {
+ if info.Library != "rocm" {
+ // TODO shouldn't happen if things are wired correctly...
+ slog.Debug("rocmGetVisibleDevicesEnv skipping over non-rocm device", "library", info.Library)
+ continue
+ }
+ ids = append(ids, info.ID)
+ }
+ return "HIP_VISIBLE_DEVICES", strings.Join(ids, ",")
+}
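A usage sketch for the helper above; the GpuInfo values are invented for illustration:

// k, v := rocmGetVisibleDevicesEnv([]GpuInfo{{Library: "rocm", ID: "0"}, {Library: "rocm", ID: "2"}})
// // k == "HIP_VISIBLE_DEVICES", v == "0,2"
// // The pair is typically exported into the llama.cpp server's environment before it is launched.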
+
+func commonAMDValidateLibDir() (string, error) {
+ // Favor our bundled version
+
+ // Installer payload location if we're running the installed binary
+ exe, err := os.Executable()
+ if err == nil {
+ rocmTargetDir := filepath.Join(filepath.Dir(exe), "rocm")
+ if rocmLibUsable(rocmTargetDir) {
+ slog.Debug("detected ROCM next to ollama executable " + rocmTargetDir)
+ return rocmTargetDir, nil
+ }
+ }
+
+ // Prefer explicit HIP env var
+ hipPath := os.Getenv("HIP_PATH")
+ if hipPath != "" {
+ hipLibDir := filepath.Join(hipPath, "bin")
+ if rocmLibUsable(hipLibDir) {
+ slog.Debug("detected ROCM via HIP_PATH=" + hipPath)
+ return hipLibDir, nil
+ }
+ }
+
+ // Scan the LD_LIBRARY_PATH or PATH
+ pathEnv := "LD_LIBRARY_PATH"
+ if runtime.GOOS == "windows" {
+ pathEnv = "PATH"
+ }
+
+ paths := os.Getenv(pathEnv)
+ for _, path := range filepath.SplitList(paths) {
+ d, err := filepath.Abs(path)
+ if err != nil {
+ continue
+ }
+ if rocmLibUsable(d) {
+ return d, nil
+ }
+ }
+
+ // Well known location(s)
+ for _, path := range RocmStandardLocations {
+ if rocmLibUsable(path) {
+ return path, nil
+ }
+ }
+
+ return "", errors.New("no suitable rocm found, falling back to CPU")
+}
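In summary, the lookup above tries these locations in order and returns the first directory whose contents satisfy ROCmLibGlobs (a descriptive note, not additional code):

// 1. <directory of the ollama executable>/rocm   (bundled payload)
// 2. $HIP_PATH/bin                                (explicit user override)
// 3. each entry of LD_LIBRARY_PATH (PATH on Windows)
// 4. RocmStandardLocations, e.g. /opt/dtk/lib and /usr/lib64 on Linux in this tree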
diff --git a/ollama/gpu/amd_hip_windows.go b/ollama/gpu/amd_hip_windows.go
new file mode 100755
index 0000000000000000000000000000000000000000..2cea282423db01c4114bb126ec253802d1943f53
--- /dev/null
+++ b/ollama/gpu/amd_hip_windows.go
@@ -0,0 +1,147 @@
+package gpu
+
+import (
+ "errors"
+ "fmt"
+ "log/slog"
+ "syscall"
+ "unsafe"
+
+ "golang.org/x/sys/windows"
+)
+
+const (
+ hipSuccess = 0
+ hipErrorNoDevice = 100
+)
+
+type hipDevicePropMinimal struct {
+ Name [256]byte
+ unused1 [140]byte
+ GcnArchName [256]byte // gfx####
+ iGPU int // Doesn't seem to actually report correctly
+ unused2 [128]byte
+}
+
+// Wrap the amdhip64.dll library for GPU discovery
+type HipLib struct {
+ dll windows.Handle
+ hipGetDeviceCount uintptr
+ hipGetDeviceProperties uintptr
+ hipMemGetInfo uintptr
+ hipSetDevice uintptr
+ hipDriverGetVersion uintptr
+}
+
+func NewHipLib() (*HipLib, error) {
+ // At runtime we depend on v6, so discover GPUs with the same library for a consistent set of GPUs
+ h, err := windows.LoadLibrary("amdhip64_6.dll")
+ if err != nil {
+ return nil, fmt.Errorf("unable to load amdhip64_6.dll, please make sure to upgrade to the latest amd driver: %w", err)
+ }
+ hl := &HipLib{}
+ hl.dll = h
+ hl.hipGetDeviceCount, err = windows.GetProcAddress(hl.dll, "hipGetDeviceCount")
+ if err != nil {
+ return nil, err
+ }
+ hl.hipGetDeviceProperties, err = windows.GetProcAddress(hl.dll, "hipGetDeviceProperties")
+ if err != nil {
+ return nil, err
+ }
+ hl.hipMemGetInfo, err = windows.GetProcAddress(hl.dll, "hipMemGetInfo")
+ if err != nil {
+ return nil, err
+ }
+ hl.hipSetDevice, err = windows.GetProcAddress(hl.dll, "hipSetDevice")
+ if err != nil {
+ return nil, err
+ }
+ hl.hipDriverGetVersion, err = windows.GetProcAddress(hl.dll, "hipDriverGetVersion")
+ if err != nil {
+ return nil, err
+ }
+ return hl, nil
+}
+
+// The hip library only evaluates the HIP_VISIBLE_DEVICES variable at startup
+// so we have to unload/reset the library after we do our initial discovery
+// to make sure our updates to that variable are processed by llama.cpp
+func (hl *HipLib) Release() {
+ err := windows.FreeLibrary(hl.dll)
+ if err != nil {
+ slog.Warn("failed to unload amdhip64.dll", "error", err)
+ }
+ hl.dll = 0
+}
+
+func (hl *HipLib) AMDDriverVersion() (driverMajor, driverMinor int, err error) {
+ if hl.dll == 0 {
+ return 0, 0, errors.New("dll has been unloaded")
+ }
+ var version int
+ status, _, err := syscall.SyscallN(hl.hipDriverGetVersion, uintptr(unsafe.Pointer(&version)))
+ if status != hipSuccess {
+ return 0, 0, fmt.Errorf("failed call to hipDriverGetVersion: %d %s", status, err)
+ }
+
+ slog.Debug("hipDriverGetVersion", "version", version)
+ driverMajor = version / 10000000
+ driverMinor = (version - (driverMajor * 10000000)) / 100000
+
+ return driverMajor, driverMinor, nil
+}
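A worked example of the decoding above; the version value is hypothetical:

// version == 60140092
//   driverMajor = 60140092 / 10000000            = 6
//   driverMinor = (60140092 - 60000000) / 100000 = 1
// i.e. HIP packs the driver version roughly as major*10000000 + minor*100000 + patch.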
+
+func (hl *HipLib) HipGetDeviceCount() int {
+ if hl.dll == 0 {
+ slog.Error("dll has been unloaded")
+ return 0
+ }
+ var count int
+ status, _, err := syscall.SyscallN(hl.hipGetDeviceCount, uintptr(unsafe.Pointer(&count)))
+ if status == hipErrorNoDevice {
+ slog.Info("AMD ROCm reports no devices found")
+ return 0
+ }
+ if status != hipSuccess {
+ slog.Warn("failed call to hipGetDeviceCount", "status", status, "error", err)
+ }
+ return count
+}
+
+func (hl *HipLib) HipSetDevice(device int) error {
+ if hl.dll == 0 {
+ return errors.New("dll has been unloaded")
+ }
+ status, _, err := syscall.SyscallN(hl.hipSetDevice, uintptr(device))
+ if status != hipSuccess {
+ return fmt.Errorf("failed call to hipSetDevice: %d %s", status, err)
+ }
+ return nil
+}
+
+func (hl *HipLib) HipGetDeviceProperties(device int) (*hipDevicePropMinimal, error) {
+ if hl.dll == 0 {
+ return nil, errors.New("dll has been unloaded")
+ }
+ var props hipDevicePropMinimal
+ status, _, err := syscall.SyscallN(hl.hipGetDeviceProperties, uintptr(unsafe.Pointer(&props)), uintptr(device))
+ if status != hipSuccess {
+ return nil, fmt.Errorf("failed call to hipGetDeviceProperties: %d %s", status, err)
+ }
+ return &props, nil
+}
+
+// HipMemGetInfo returns free memory, total memory (both in bytes), and any error for the currently selected device.
+func (hl *HipLib) HipMemGetInfo() (uint64, uint64, error) {
+ if hl.dll == 0 {
+ return 0, 0, errors.New("dll has been unloaded")
+ }
+ var totalMemory uint64
+ var freeMemory uint64
+ status, _, err := syscall.SyscallN(hl.hipMemGetInfo, uintptr(unsafe.Pointer(&freeMemory)), uintptr(unsafe.Pointer(&totalMemory)))
+ if status != hipSuccess {
+ return 0, 0, fmt.Errorf("failed call to hipMemGetInfo: %d %s", status, err)
+ }
+ return freeMemory, totalMemory, nil
+}
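The functions above are normally used in a short discover-then-release sequence. The helper below is a hypothetical sketch of that flow, not part of this file; it assumes the package's existing imports plus strings, and the real caller is the Windows discovery code elsewhere in this package.

// logHipDevices illustrates the intended HipLib call order.
func logHipDevices() {
	hl, err := NewHipLib()
	if err != nil {
		slog.Debug(err.Error())
		return
	}
	// Release is required so that a later HIP_VISIBLE_DEVICES change is honored
	// when llama.cpp loads the library itself.
	defer hl.Release()

	for i := 0; i < hl.HipGetDeviceCount(); i++ {
		if err := hl.HipSetDevice(i); err != nil {
			continue
		}
		props, err := hl.HipGetDeviceProperties(i)
		if err != nil {
			continue
		}
		free, total, err := hl.HipMemGetInfo()
		if err != nil {
			continue
		}
		name := strings.TrimRight(string(props.Name[:]), "\x00")
		gfx := strings.TrimRight(string(props.GcnArchName[:]), "\x00")
		slog.Debug("hip device", "id", i, "name", name, "gfx", gfx, "free", free, "total", total)
	}
}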
diff --git a/ollama/gpu/amd_linux.go b/ollama/gpu/amd_linux.go
new file mode 100755
index 0000000000000000000000000000000000000000..fee5834efa8e811eb1fe70f11875aaf986ac6f4a
--- /dev/null
+++ b/ollama/gpu/amd_linux.go
@@ -0,0 +1,460 @@
+package gpu
+
+import (
+ "bufio"
+ "errors"
+ "fmt"
+ "io"
+ "log/slog"
+ "os"
+ "path/filepath"
+ "regexp"
+ "slices"
+ "sort"
+ "strconv"
+ "strings"
+
+ "github.com/ollama/ollama/envconfig"
+ "github.com/ollama/ollama/format"
+)
+
+// Discovery logic for AMD/ROCm GPUs
+
+const (
+ // DriverVersionFile = "/sys/module/amdgpu/version"
+ DriverVersionFile = "/sys/module/hydcu/version"
+ AMDNodesSysfsDir = "/sys/class/kfd/kfd/topology/nodes/"
+ GPUPropertiesFileGlob = AMDNodesSysfsDir + "*/properties"
+
+ // Prefix with the node dir
+ GPUTotalMemoryFileGlob = "mem_banks/*/properties" // size_in_bytes line
+
+ // Direct Rendering Manager sysfs location
+ DRMDeviceDirGlob = "/sys/class/drm/card*/device"
+ DRMTotalMemoryFile = "mem_info_vram_total"
+ DRMUsedMemoryFile = "mem_info_vram_used"
+
+ // In hex; properties file is in decimal
+ DRMUniqueIDFile = "unique_id"
+ DRMVendorFile = "vendor"
+ DRMDeviceFile = "device"
+)
+
+var (
+ // Used to validate if the given ROCm lib is usable
+ // ROCmLibGlobs = []string{"libhipblas.so.2*", "rocblas"} // TODO - probably include more coverage of files here...
+ ROCmLibGlobs = []string{"libhipblas.so.0.1","rocblas"}
+ // RocmStandardLocations = []string{"/opt/rocm/lib", "/usr/lib64"}
+ RocmStandardLocations = []string{"/opt/dtk/lib", "/usr/lib64"}
+)
+
+// Gather GPU information from the amdgpu driver if any supported GPUs are detected
+func AMDGetGPUInfo() []RocmGPUInfo {
+ resp := []RocmGPUInfo{}
+ if !AMDDetected() {
+ return resp
+ }
+
+ // Opportunistic logging of driver version to aid in troubleshooting
+ driverMajor, driverMinor, err := AMDDriverVersion()
+ if err != nil {
+ // TODO - if we see users crash and burn with the upstreamed kernel this can be adjusted to hard-fail rocm support and fall back to CPU
+ slog.Warn("ollama recommends running the https://www.amd.com/en/support/linux-drivers", "error", err)
+ }
+
+ // Determine if the user has already pre-selected which GPUs to look at, then ignore the others
+ var visibleDevices []string
+ hipVD := envconfig.HipVisibleDevices() // zero based index only
+ rocrVD := envconfig.RocrVisibleDevices() // zero based index or UUID, but consumer cards seem to not support UUID
+ gpuDO := envconfig.GpuDeviceOrdinal() // zero based index
+ switch {
+ // TODO is this priority order right?
+ case hipVD != "":
+ visibleDevices = strings.Split(hipVD, ",")
+ case rocrVD != "":
+ visibleDevices = strings.Split(rocrVD, ",")
+ // TODO - since we don't yet support UUIDs, consider detecting and reporting here
+ // all our test systems show GPU-XX indicating UUID is not supported
+ case gpuDO != "":
+ visibleDevices = strings.Split(gpuDO, ",")
+ }
+
+ gfxOverride := envconfig.HsaOverrideGfxVersion()
+ var supported []string
+ libDir := ""
+
+ // The amdgpu driver always exposes the host CPU(s) first, but we have to skip them and subtract
+ // from the other IDs to get alignment with the HIP libraries expectations (zero is the first GPU, not the CPU)
+ matches, _ := filepath.Glob(GPUPropertiesFileGlob)
+ sort.Slice(matches, func(i, j int) bool {
+ // /sys/class/kfd/kfd/topology/nodes/<number>/properties
+ a, err := strconv.ParseInt(filepath.Base(filepath.Dir(matches[i])), 10, 64)
+ if err != nil {
+ slog.Debug("parse err", "error", err, "match", matches[i])
+ return false
+ }
+ b, err := strconv.ParseInt(filepath.Base(filepath.Dir(matches[j])), 10, 64)
+ if err != nil {
+ slog.Debug("parse err", "error", err, "match", matches[i])
+ return false
+ }
+ return a < b
+ })
+ cpuCount := 0
+ for _, match := range matches {
+ slog.Debug("evaluating amdgpu node " + match)
+ fp, err := os.Open(match)
+ if err != nil {
+ slog.Debug("failed to open sysfs node", "file", match, "error", err)
+ continue
+ }
+ defer fp.Close()
+ nodeID, err := strconv.Atoi(filepath.Base(filepath.Dir(match)))
+ if err != nil {
+ slog.Debug("failed to parse node ID", "error", err)
+ continue
+ }
+
+ scanner := bufio.NewScanner(fp)
+ isCPU := false
+ var major, minor, patch uint64
+ var vendor, device, uniqueID uint64
+ for scanner.Scan() {
+ line := strings.TrimSpace(scanner.Text())
+ // Note: we could also use "cpu_cores_count X" where X is greater than zero to detect CPUs
+ if strings.HasPrefix(line, "gfx_target_version") {
+ ver := strings.Fields(line)
+
+ // Detect CPUs
+ if len(ver) == 2 && ver[1] == "0" {
+ slog.Debug("detected CPU " + match)
+ isCPU = true
+ break
+ }
+
+ if len(ver) != 2 || len(ver[1]) < 5 {
+ slog.Warn("malformed "+match, "gfx_target_version", line)
+ // If this winds up being a CPU, our offsets may be wrong
+ continue
+ }
+ l := len(ver[1])
+ var err1, err2, err3 error
+ patch, err1 = strconv.ParseUint(ver[1][l-2:l], 10, 32)
+ minor, err2 = strconv.ParseUint(ver[1][l-4:l-2], 10, 32)
+ major, err3 = strconv.ParseUint(ver[1][:l-4], 10, 32)
+ if err1 != nil || err2 != nil || err3 != nil {
+ slog.Debug("malformed int " + line)
+ continue
+ }
+ } else if strings.HasPrefix(line, "vendor_id") {
+ ver := strings.Fields(line)
+ if len(ver) != 2 {
+ slog.Debug("malformed", "vendor_id", line)
+ continue
+ }
+ vendor, err = strconv.ParseUint(ver[1], 10, 64)
+ if err != nil {
+ slog.Debug("malformed", "vendor_id", line, "error", err)
+ }
+ } else if strings.HasPrefix(line, "device_id") {
+ ver := strings.Fields(line)
+ if len(ver) != 2 {
+ slog.Debug("malformed", "device_id", line)
+ continue
+ }
+ device, err = strconv.ParseUint(ver[1], 10, 64)
+ if err != nil {
+ slog.Debug("malformed", "device_id", line, "error", err)
+ }
+ } else if strings.HasPrefix(line, "unique_id") {
+ ver := strings.Fields(line)
+ if len(ver) != 2 {
+ slog.Debug("malformed", "unique_id", line)
+ continue
+ }
+ uniqueID, err = strconv.ParseUint(ver[1], 10, 64)
+ if err != nil {
+ slog.Debug("malformed", "unique_id", line, "error", err)
+ }
+ }
+ // TODO - any other properties we want to extract and record?
+ // vendor_id + device_id -> pci lookup for "Name"
+ // Other metrics that may help us understand relative performance between multiple GPUs
+ }
+
+ // Note: while ./mem_banks/*/used_memory exists, it doesn't appear to take other VRAM consumers
+ // into consideration, so we instead map the device over to the DRM driver sysfs nodes which
+ // do reliably report VRAM usage.
+
+ if isCPU {
+ cpuCount++
+ continue
+ }
+
+ // CPUs are always first in the list
+ gpuID := nodeID - cpuCount
+
+ // Shouldn't happen, but just in case...
+ if gpuID < 0 {
+ slog.Error("unexpected amdgpu sysfs data resulted in negative GPU ID, please set OLLAMA_DEBUG=1 and report an issue")
+ return nil
+ }
+
+ if int(major) < RocmComputeMin {
+ slog.Warn(fmt.Sprintf("amdgpu too old gfx%d%x%x", major, minor, patch), "gpu", gpuID)
+ continue
+ }
+
+ // Look up the memory for the current node
+ totalMemory := uint64(0)
+ usedMemory := uint64(0)
+ var usedFile string
+ mapping := []struct {
+ id uint64
+ filename string
+ }{
+ {vendor, DRMVendorFile},
+ {device, DRMDeviceFile},
+ {uniqueID, DRMUniqueIDFile}, // Not all devices will report this
+ }
+ slog.Debug("mapping amdgpu to drm sysfs nodes", "amdgpu", match, "vendor", vendor, "device", device, "unique_id", uniqueID)
+ // Map over to DRM location to find the total/free memory
+ drmMatches, _ := filepath.Glob(DRMDeviceDirGlob)
+ for _, devDir := range drmMatches {
+ matched := true
+ for _, m := range mapping {
+ if m.id == 0 {
+ // Null ID means it didn't populate, so we can't use it to match
+ continue
+ }
+ filename := filepath.Join(devDir, m.filename)
+ buf, err := os.ReadFile(filename)
+ if err != nil {
+ slog.Debug("failed to read sysfs node", "file", filename, "error", err)
+ matched = false
+ break
+ }
+ // values here are in hex, strip off the lead 0x and parse so we can compare the numeric (decimal) values in amdgpu
+ cmp, err := strconv.ParseUint(strings.TrimPrefix(strings.TrimSpace(string(buf)), "0x"), 16, 64)
+ if err != nil {
+ slog.Debug("failed to parse sysfs node", "file", filename, "error", err)
+ matched = false
+ break
+ }
+ if cmp != m.id {
+ matched = false
+ break
+ }
+ }
+ if !matched {
+ continue
+ }
+
+ // Found the matching DRM directory
+ slog.Debug("matched", "amdgpu", match, "drm", devDir)
+ totalFile := filepath.Join(devDir, DRMTotalMemoryFile)
+ buf, err := os.ReadFile(totalFile)
+ if err != nil {
+ slog.Debug("failed to read sysfs node", "file", totalFile, "error", err)
+ break
+ }
+ totalMemory, err = strconv.ParseUint(strings.TrimSpace(string(buf)), 10, 64)
+ if err != nil {
+ slog.Debug("failed to parse sysfs node", "file", totalFile, "error", err)
+ break
+ }
+
+ usedFile = filepath.Join(devDir, DRMUsedMemoryFile)
+ usedMemory, err = getFreeMemory(usedFile)
+ if err != nil {
+ slog.Debug("failed to update used memory", "error", err)
+ }
+ break
+ }
+
+ // iGPU detection, remove this check once we can support an iGPU variant of the rocm library
+ if totalMemory < IGPUMemLimit {
+ slog.Info("unsupported Radeon iGPU detected skipping", "id", gpuID, "total", format.HumanBytes2(totalMemory))
+ continue
+ }
+ var name string
+ // TODO - PCI ID lookup
+ if vendor > 0 && device > 0 {
+ name = fmt.Sprintf("%04x:%04x", vendor, device)
+ }
+
+ slog.Debug("amdgpu memory", "gpu", gpuID, "total", format.HumanBytes2(totalMemory))
+ slog.Debug("amdgpu memory", "gpu", gpuID, "available", format.HumanBytes2(totalMemory-usedMemory))
+ gpuInfo := RocmGPUInfo{
+ GpuInfo: GpuInfo{
+ Library: "rocm",
+ memInfo: memInfo{
+ TotalMemory: totalMemory,
+ FreeMemory: (totalMemory - usedMemory),
+ },
+ ID: strconv.Itoa(gpuID),
+ Name: name,
+ Compute: fmt.Sprintf("gfx%d%x%x", major, minor, patch),
+ MinimumMemory: rocmMinimumMemory,
+ DriverMajor: driverMajor,
+ DriverMinor: driverMinor,
+ },
+ usedFilepath: usedFile,
+ }
+
+ // If the user wants to filter to a subset of devices, filter out if we aren't a match
+ if len(visibleDevices) > 0 {
+ include := false
+ for _, visible := range visibleDevices {
+ if visible == gpuInfo.ID {
+ include = true
+ break
+ }
+ }
+ if !include {
+ slog.Info("filtering out device per user request", "id", gpuInfo.ID, "visible_devices", visibleDevices)
+ continue
+ }
+ }
+
+ // Final validation is gfx compatibility - load the library if we haven't already loaded it
+ // even if the user overrides, we still need to validate the library
+ if libDir == "" {
+ libDir, err = AMDValidateLibDir()
+ if err != nil {
+ slog.Warn("unable to verify rocm library, will use cpu", "error", err)
+ return nil
+ }
+ }
+ gpuInfo.DependencyPath = libDir
+
+ if gfxOverride == "" {
+ // Only load supported list once
+ if len(supported) == 0 {
+ supported, err = GetSupportedGFX(libDir)
+ if err != nil {
+ slog.Warn("failed to lookup supported GFX types, falling back to CPU mode", "error", err)
+ return nil
+ }
+ slog.Debug("rocm supported GPUs", "types", supported)
+ }
+ gfx := gpuInfo.Compute
+ if !slices.Contains[[]string, string](supported, gfx) {
+ slog.Warn("amdgpu is not supported", "gpu", gpuInfo.ID, "gpu_type", gfx, "library", libDir, "supported_types", supported)
+ // TODO - consider discrete markdown just for ROCM troubleshooting?
+ slog.Warn("See https://github.com/ollama/ollama/blob/main/docs/gpu.md#overrides for HSA_OVERRIDE_GFX_VERSION usage")
+ continue
+ } else {
+ slog.Info("amdgpu is supported", "gpu", gpuInfo.ID, "gpu_type", gfx)
+ }
+ } else {
+ slog.Info("skipping rocm gfx compatibility check", "HSA_OVERRIDE_GFX_VERSION", gfxOverride)
+ }
+
+ // Check for env var workarounds
+ if name == "1002:687f" { // Vega RX 56
+ gpuInfo.EnvWorkarounds = append(gpuInfo.EnvWorkarounds, [2]string{"HSA_ENABLE_SDMA", "0"})
+ }
+
+ // The GPU has passed all the verification steps and is supported
+ resp = append(resp, gpuInfo)
+ }
+ if len(resp) == 0 {
+ slog.Info("no compatible amdgpu devices detected")
+ }
+ return resp
+}
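A worked example of the gfx_target_version parsing inside the loop above; the sysfs value is illustrative:

// A properties line "gfx_target_version 90012" splits (from the right) into
//   patch = "12" -> 12, minor = "00" -> 0, major = "9" -> 9
// and Compute is rendered with %d%x%x as "gfx90c".
// A value of "0" marks the node as a CPU rather than a GPU.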
+
+// Quick check for AMD driver so we can skip amdgpu discovery if not present
+func AMDDetected() bool {
+ // Some driver versions (older?) don't have a version file, so just lookup the parent dir
+ sysfsDir := filepath.Dir(DriverVersionFile)
+ _, err := os.Stat(sysfsDir)
+ if errors.Is(err, os.ErrNotExist) {
+ slog.Debug("amdgpu driver not detected " + sysfsDir)
+ return false
+ } else if err != nil {
+ slog.Debug("error looking up amd driver", "path", sysfsDir, "error", err)
+ return false
+ }
+ return true
+}
+
+// Prefer to use host installed ROCm, as long as it meets our minimum requirements
+// failing that, tell the user how to download it on their own
+func AMDValidateLibDir() (string, error) {
+ libDir, err := commonAMDValidateLibDir()
+ if err == nil {
+ return libDir, nil
+ }
+
+ // Well known ollama installer path
+ installedRocmDir := "/usr/share/ollama/lib/rocm"
+ if rocmLibUsable(installedRocmDir) {
+ return installedRocmDir, nil
+ }
+
+ // If we still haven't found a usable rocm, the user will have to install it on their own
+ slog.Warn("amdgpu detected, but no compatible rocm library found. Either install rocm v6, or follow manual install instructions at https://github.com/ollama/ollama/blob/main/docs/linux.md#manual-install")
+ return "", errors.New("no suitable rocm found, falling back to CPU")
+}
+
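+// AMDDriverVersion parses the major.minor components of the amdgpu sysfs version file, which typically holds a dotted string such as "6.2.4"; any remaining fields are ignored.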
+func AMDDriverVersion() (driverMajor, driverMinor int, err error) {
+ _, err = os.Stat(DriverVersionFile)
+ if err != nil {
+ return 0, 0, fmt.Errorf("amdgpu version file missing: %s %w", DriverVersionFile, err)
+ }
+ fp, err := os.Open(DriverVersionFile)
+ if err != nil {
+ return 0, 0, err
+ }
+ defer fp.Close()
+ verString, err := io.ReadAll(fp)
+ if err != nil {
+ return 0, 0, err
+ }
+
+ pattern := `\A(\d+)\.(\d+).*`
+ regex := regexp.MustCompile(pattern)
+ match := regex.FindStringSubmatch(string(verString))
+ if len(match) < 2 {
+ return 0, 0, fmt.Errorf("malformed version string %s", string(verString))
+ }
+ driverMajor, err = strconv.Atoi(match[1])
+ if err != nil {
+ return 0, 0, err
+ }
+ driverMinor, err = strconv.Atoi(match[2])
+ if err != nil {
+ return 0, 0, err
+ }
+ return driverMajor, driverMinor, nil
+}
+
+func (gpus RocmGPUInfoList) RefreshFreeMemory() error {
+ if len(gpus) == 0 {
+ return nil
+ }
+ for i := range gpus {
+ usedMemory, err := getFreeMemory(gpus[i].usedFilepath)
+ if err != nil {
+ return err
+ }
+ slog.Debug("updating rocm free memory", "gpu", gpus[i].ID, "name", gpus[i].Name, "before", format.HumanBytes2(gpus[i].FreeMemory), "now", format.HumanBytes2(gpus[i].TotalMemory-usedMemory))
+ gpus[i].FreeMemory = gpus[i].TotalMemory - usedMemory
+ }
+ return nil
+}
+
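+// getFreeMemory reads the amdgpu used-VRAM sysfs node and returns the used byte count; callers subtract it from the total to derive free memory.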
+func getFreeMemory(usedFile string) (uint64, error) {
+ buf, err := os.ReadFile(usedFile)
+ if err != nil {
+ return 0, fmt.Errorf("failed to read sysfs node %s %w", usedFile, err)
+ }
+ usedMemory, err := strconv.ParseUint(strings.TrimSpace(string(buf)), 10, 64)
+ if err != nil {
+ slog.Debug("failed to parse sysfs node", "file", usedFile, "error", err)
+ return 0, fmt.Errorf("failed to parse sysfs node %s %w", usedFile, err)
+ }
+ return usedMemory, nil
+}
diff --git a/ollama/gpu/amd_windows.go b/ollama/gpu/amd_windows.go
new file mode 100755
index 0000000000000000000000000000000000000000..edabeb43a877e59ae001aaa643dc6f95f64be787
--- /dev/null
+++ b/ollama/gpu/amd_windows.go
@@ -0,0 +1,192 @@
+package gpu
+
+import (
+ "bytes"
+ "errors"
+ "log/slog"
+ "os"
+ "path/filepath"
+ "slices"
+ "strconv"
+ "strings"
+
+ "github.com/ollama/ollama/envconfig"
+ "github.com/ollama/ollama/format"
+)
+
+const (
+ // TODO We're looking for this exact name to detect iGPUs since hipGetDeviceProperties never reports integrated==true
+ iGPUName = "AMD Radeon(TM) Graphics"
+)
+
+var (
+ // Used to validate if the given ROCm lib is usable
+ ROCmLibGlobs = []string{"hipblas.dll", "rocblas"} // This is not sufficient to discern v5 vs v6
+ RocmStandardLocations = []string{"C:\\Program Files\\AMD\\ROCm\\6.1\\bin"} // TODO glob?
+)
+
+func AMDGetGPUInfo() []RocmGPUInfo {
+ resp := []RocmGPUInfo{}
+ hl, err := NewHipLib()
+ if err != nil {
+ slog.Debug(err.Error())
+ return nil
+ }
+ defer hl.Release()
+
+ driverMajor, driverMinor, err := hl.AMDDriverVersion()
+ if err != nil {
+ // For now this is benign, but we may eventually need to fail compatibility checks
+ slog.Debug("error looking up amd driver version", "error", err)
+ }
+
+ // Note: the HIP library automatically handles subsetting to any HIP_VISIBLE_DEVICES the user specified
+ count := hl.HipGetDeviceCount()
+ if count == 0 {
+ return nil
+ }
+ libDir, err := AMDValidateLibDir()
+ if err != nil {
+ slog.Warn("unable to verify rocm library, will use cpu", "error", err)
+ return nil
+ }
+
+ var supported []string
+ gfxOverride := envconfig.HsaOverrideGfxVersion()
+ if gfxOverride == "" {
+ supported, err = GetSupportedGFX(libDir)
+ if err != nil {
+ slog.Warn("failed to lookup supported GFX types, falling back to CPU mode", "error", err)
+ return nil
+ }
+ } else {
+ slog.Info("skipping rocm gfx compatibility check", "HSA_OVERRIDE_GFX_VERSION", gfxOverride)
+ }
+
+ slog.Debug("detected hip devices", "count", count)
+ // TODO how to determine the underlying device ID when visible devices is causing this to subset?
+ for i := range count {
+ err = hl.HipSetDevice(i)
+ if err != nil {
+ slog.Warn("set device", "id", i, "error", err)
+ continue
+ }
+
+ props, err := hl.HipGetDeviceProperties(i)
+ if err != nil {
+ slog.Warn("get properties", "id", i, "error", err)
+ continue
+ }
+ n := bytes.IndexByte(props.Name[:], 0)
+ name := string(props.Name[:n])
+ // TODO is UUID actually populated on windows?
+ // Can luid be used on windows for setting visible devices (and is it actually set?)
+ n = bytes.IndexByte(props.GcnArchName[:], 0)
+ gfx := string(props.GcnArchName[:n])
+ slog.Debug("hip device", "id", i, "name", name, "gfx", gfx)
+ // slog.Info(fmt.Sprintf("[%d] Integrated: %d", i, props.iGPU)) // DOESN'T REPORT CORRECTLY! Always 0
+ // TODO Why isn't props.iGPU accurate!?
+ if strings.EqualFold(name, iGPUName) {
+ slog.Info("unsupported Radeon iGPU detected skipping", "id", i, "name", name, "gfx", gfx)
+ continue
+ }
+ if gfxOverride == "" {
+ // Strip off Target Features when comparing
+ if !slices.Contains[[]string, string](supported, strings.Split(gfx, ":")[0]) {
+ slog.Warn("amdgpu is not supported", "gpu", i, "gpu_type", gfx, "library", libDir, "supported_types", supported)
+ // TODO - consider discrete markdown just for ROCM troubleshooting?
+ slog.Warn("See https://github.com/ollama/ollama/blob/main/docs/troubleshooting.md for HSA_OVERRIDE_GFX_VERSION usage")
+ continue
+ } else {
+ slog.Debug("amdgpu is supported", "gpu", i, "gpu_type", gfx)
+ }
+ }
+
+ freeMemory, totalMemory, err := hl.HipMemGetInfo()
+ if err != nil {
+ slog.Warn("get mem info", "id", i, "error", err)
+ continue
+ }
+
+ // iGPU detection, remove this check once we can support an iGPU variant of the rocm library
+ if totalMemory < IGPUMemLimit {
+ slog.Info("amdgpu appears to be an iGPU, skipping", "gpu", i, "total", format.HumanBytes2(totalMemory))
+ continue
+ }
+
+ slog.Debug("amdgpu memory", "gpu", i, "total", format.HumanBytes2(totalMemory))
+ slog.Debug("amdgpu memory", "gpu", i, "available", format.HumanBytes2(freeMemory))
+ gpuInfo := RocmGPUInfo{
+ GpuInfo: GpuInfo{
+ Library: "rocm",
+ memInfo: memInfo{
+ TotalMemory: totalMemory,
+ FreeMemory: freeMemory,
+ },
+ // Free memory reporting on Windows is not reliable until we bump to ROCm v6.2
+ UnreliableFreeMemory: true,
+
+ ID: strconv.Itoa(i), // TODO this is probably wrong if we specify visible devices
+ DependencyPath: libDir,
+ MinimumMemory: rocmMinimumMemory,
+ Name: name,
+ Compute: gfx,
+ DriverMajor: driverMajor,
+ DriverMinor: driverMinor,
+ },
+ index: i,
+ }
+
+ resp = append(resp, gpuInfo)
+ }
+
+ return resp
+}
+
+func AMDValidateLibDir() (string, error) {
+ libDir, err := commonAMDValidateLibDir()
+ if err == nil {
+ return libDir, nil
+ }
+
+ // Installer payload (if we're running from some other location)
+ localAppData := os.Getenv("LOCALAPPDATA")
+ appDir := filepath.Join(localAppData, "Programs", "Ollama")
+ rocmTargetDir := filepath.Join(appDir, "rocm")
+ if rocmLibUsable(rocmTargetDir) {
+ slog.Debug("detected ollama installed ROCm at " + rocmTargetDir)
+ return rocmTargetDir, nil
+ }
+
+ // Should not happen on windows since we include it in the installer, but stand-alone binary might hit this
+ slog.Warn("amdgpu detected, but no compatible rocm library found. Please install ROCm")
+ return "", errors.New("no suitable rocm found, falling back to CPU")
+}
+
+func (gpus RocmGPUInfoList) RefreshFreeMemory() error {
+ if len(gpus) == 0 {
+ return nil
+ }
+ hl, err := NewHipLib()
+ if err != nil {
+ slog.Debug(err.Error())
+ return nil
+ }
+ defer hl.Release()
+
+ for i := range gpus {
+ err := hl.HipSetDevice(gpus[i].index)
+ if err != nil {
+ return err
+ }
+ freeMemory, _, err := hl.HipMemGetInfo()
+ if err != nil {
+ slog.Warn("get mem info", "id", i, "error", err)
+ continue
+ }
+ slog.Debug("updating rocm free memory", "gpu", gpus[i].ID, "name", gpus[i].Name, "before", format.HumanBytes2(gpus[i].FreeMemory), "now", format.HumanBytes2(freeMemory))
+ gpus[i].FreeMemory = freeMemory
+ }
+ return nil
+}
diff --git a/ollama/gpu/assets.go b/ollama/gpu/assets.go
new file mode 100755
index 0000000000000000000000000000000000000000..6d62d0dca983b677e0533aca4363285bbf9d3253
--- /dev/null
+++ b/ollama/gpu/assets.go
@@ -0,0 +1,148 @@
+package gpu
+
+import (
+ "errors"
+ "fmt"
+ "log/slog"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strconv"
+ "strings"
+ "sync"
+ "syscall"
+ "time"
+
+ "github.com/ollama/ollama/envconfig"
+)
+
+var (
+ lock sync.Mutex
+ payloadsDir = ""
+)
+
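+// PayloadsDir returns the directory runner payloads are extracted to, honoring the runners-dir override from envconfig and otherwise creating a pid-tracked temp dir such as /tmp/ollama<random>/runners on linux.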
+func PayloadsDir() (string, error) {
+ lock.Lock()
+ defer lock.Unlock()
+ var err error
+ if payloadsDir == "" {
+ runnersDir := envconfig.RunnersDir()
+
+ if runnersDir != "" {
+ payloadsDir = runnersDir
+ return payloadsDir, nil
+ }
+
+ // The remainder only applies on non-windows where we still carry payloads in the main executable
+ cleanupTmpDirs()
+ tmpDir := envconfig.TmpDir()
+ if tmpDir == "" {
+ tmpDir, err = os.MkdirTemp("", "ollama")
+ if err != nil {
+ return "", fmt.Errorf("failed to generate tmp dir: %w", err)
+ }
+ } else {
+ err = os.MkdirAll(tmpDir, 0o755)
+ if err != nil {
+ return "", fmt.Errorf("failed to generate tmp dir %s: %w", tmpDir, err)
+ }
+ }
+
+ // Track our pid so we can clean up orphaned tmpdirs
+ n := filepath.Join(tmpDir, "ollama.pid")
+ if err := os.WriteFile(n, []byte(strconv.Itoa(os.Getpid())), 0o644); err != nil {
+ return "", fmt.Errorf("failed to write pid file %s: %w", n, err)
+ }
+
+ // We create a distinct subdirectory for payloads within the tmpdir
+ // This will typically look like /tmp/ollama3208993108/runners on linux
+ payloadsDir = filepath.Join(tmpDir, "runners")
+ }
+ return payloadsDir, nil
+}
+
+// Best effort to clean up prior tmpdirs
+func cleanupTmpDirs() {
+ matches, err := filepath.Glob(filepath.Join(os.TempDir(), "ollama*", "ollama.pid"))
+ if err != nil {
+ return
+ }
+
+ for _, match := range matches {
+ raw, err := os.ReadFile(match)
+ if errors.Is(err, os.ErrNotExist) {
+ slog.Debug("not a ollama runtime directory, skipping", "path", match)
+ continue
+ } else if err != nil {
+ slog.Warn("could not read ollama.pid, skipping", "path", match, "error", err)
+ continue
+ }
+
+ pid, err := strconv.Atoi(string(raw))
+ if err != nil {
+ slog.Warn("invalid pid, skipping", "path", match, "error", err)
+ continue
+ }
+
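+ // Signal 0 delivers nothing but reports whether the process still exists, so only pidfiles left behind by dead processes are cleaned up.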
+ p, err := os.FindProcess(pid)
+ if err == nil && !errors.Is(p.Signal(syscall.Signal(0)), os.ErrProcessDone) {
+ slog.Warn("process still running, skipping", "pid", pid, "path", match)
+ continue
+ }
+
+ if err := os.Remove(match); err != nil {
+ slog.Warn("could not cleanup stale pidfile", "path", match, "error", err)
+ }
+
+ runners := filepath.Join(filepath.Dir(match), "runners")
+ if err := os.RemoveAll(runners); err != nil {
+ slog.Warn("could not cleanup stale runners", "path", runners, "error", err)
+ }
+
+ if err := os.Remove(filepath.Dir(match)); err != nil {
+ slog.Warn("could not cleanup stale tmpdir", "path", filepath.Dir(match), "error", err)
+ }
+ }
+}
+
+func Cleanup() {
+ lock.Lock()
+ defer lock.Unlock()
+ runnersDir := envconfig.RunnersDir()
+ if payloadsDir != "" && runnersDir == "" && runtime.GOOS != "windows" {
+ // We want to fully clean up the tmpdir parent of the payloads dir
+ tmpDir := filepath.Clean(filepath.Join(payloadsDir, ".."))
+ slog.Debug("cleaning up", "dir", tmpDir)
+ err := os.RemoveAll(tmpDir)
+ if err != nil {
+ // On windows, if we remove too quickly the llama.dll may still be in-use and fail to remove
+ time.Sleep(1000 * time.Millisecond)
+ err = os.RemoveAll(tmpDir)
+ if err != nil {
+ slog.Warn("failed to clean up", "dir", tmpDir, "err", err)
+ }
+ }
+ }
+}
+
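+// UpdatePath prepends dir to PATH on Windows so bundled libraries are found first, dropping stale entries that point into earlier temp dirs.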
+func UpdatePath(dir string) {
+ if runtime.GOOS == "windows" {
+ tmpDir := filepath.Dir(dir)
+ pathComponents := strings.Split(os.Getenv("PATH"), ";")
+ i := 0
+ for _, comp := range pathComponents {
+ if strings.EqualFold(comp, dir) {
+ return
+ }
+ // Remove any other prior paths to our temp dir
+ if !strings.HasPrefix(strings.ToLower(comp), strings.ToLower(tmpDir)) {
+ pathComponents[i] = comp
+ i++
+ }
+ }
+ newPath := strings.Join(append([]string{dir}, pathComponents...), ";")
+ slog.Info("updating", "PATH", newPath)
+ os.Setenv("PATH", newPath)
+ }
+ // linux and darwin rely on rpath
+}
diff --git a/ollama/gpu/cpu_common.go b/ollama/gpu/cpu_common.go
new file mode 100755
index 0000000000000000000000000000000000000000..34edcdc5a01d2afd429a38c31964c5e2fca0afe0
--- /dev/null
+++ b/ollama/gpu/cpu_common.go
@@ -0,0 +1,37 @@
+package gpu
+
+import (
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+
+ "golang.org/x/sys/cpu"
+)
+
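+// GetCPUCapability reports the best supported x86 vector extension (AVX2, then AVX), falling back to CPUCapabilityNone as the lowest common denominator.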
+func GetCPUCapability() CPUCapability {
+ if cpu.X86.HasAVX2 {
+ return CPUCapabilityAVX2
+ }
+ if cpu.X86.HasAVX {
+ return CPUCapabilityAVX
+ }
+ // else LCD
+ return CPUCapabilityNone
+}
+
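+// IsNUMA reports whether more than one physical CPU package is present by counting distinct physical_package_id values in sysfs; llama.cpp only supports NUMA on linux.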
+func IsNUMA() bool {
+ if runtime.GOOS != "linux" {
+ // numa support in llama.cpp is linux only
+ return false
+ }
+ ids := map[string]interface{}{}
+ packageIds, _ := filepath.Glob("/sys/devices/system/cpu/cpu*/topology/physical_package_id")
+ for _, packageId := range packageIds {
+ id, err := os.ReadFile(packageId)
+ if err == nil {
+ ids[strings.TrimSpace(string(id))] = struct{}{}
+ }
+ }
+ return len(ids) > 1
+}
diff --git a/ollama/gpu/cuda_common.go b/ollama/gpu/cuda_common.go
new file mode 100755
index 0000000000000000000000000000000000000000..c90a644c48eeec1dfcb94d076e7349c216b7bd2e
--- /dev/null
+++ b/ollama/gpu/cuda_common.go
@@ -0,0 +1,21 @@
+//go:build linux || windows
+
+package gpu
+
+import (
+ "log/slog"
+ "strings"
+)
+
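+// cudaGetVisibleDevicesEnv returns the environment variable name and a comma-separated ID list for the selected CUDA devices, e.g. ("CUDA_VISIBLE_DEVICES", "0,2").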
+func cudaGetVisibleDevicesEnv(gpuInfo []GpuInfo) (string, string) {
+ ids := []string{}
+ for _, info := range gpuInfo {
+ if info.Library != "cuda" {
+ // TODO shouldn't happen if things are wired correctly...
+ slog.Debug("cudaGetVisibleDevicesEnv skipping over non-cuda device", "library", info.Library)
+ continue
+ }
+ ids = append(ids, info.ID)
+ }
+ return "CUDA_VISIBLE_DEVICES", strings.Join(ids, ",")
+}
diff --git a/ollama/gpu/gpu.go b/ollama/gpu/gpu.go
new file mode 100755
index 0000000000000000000000000000000000000000..dc124a3ed5981a7a22279ea6a5690e0c9bdfb1f4
--- /dev/null
+++ b/ollama/gpu/gpu.go
@@ -0,0 +1,643 @@
+//go:build linux || windows
+
+package gpu
+
+/*
+#cgo linux LDFLAGS: -lrt -lpthread -ldl -lstdc++ -lm
+#cgo windows LDFLAGS: -lpthread
+
+#include "gpu_info.h"
+*/
+import "C"
+
+import (
+ "fmt"
+ "log/slog"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "sync"
+ "unsafe"
+
+ "github.com/ollama/ollama/envconfig"
+ "github.com/ollama/ollama/format"
+)
+
+type cudaHandles struct {
+ deviceCount int
+ cudart *C.cudart_handle_t
+ nvcuda *C.nvcuda_handle_t
+ nvml *C.nvml_handle_t
+}
+
+type oneapiHandles struct {
+ oneapi *C.oneapi_handle_t
+ deviceCount int
+}
+
+const (
+ cudaMinimumMemory = 457 * format.MebiByte
+ rocmMinimumMemory = 457 * format.MebiByte
+ // TODO OneAPI minimum memory
+)
+
+var (
+ gpuMutex sync.Mutex
+ bootstrapped bool
+ cpuCapability CPUCapability
+ cpus []CPUInfo
+ cudaGPUs []CudaGPUInfo
+ nvcudaLibPath string
+ cudartLibPath string
+ oneapiLibPath string
+ nvmlLibPath string
+ rocmGPUs []RocmGPUInfo
+ oneapiGPUs []OneapiGPUInfo
+)
+
+// With our current CUDA compile flags, compute capability older than 5.0 will not work properly
+var CudaComputeMin = [2]C.int{5, 0}
+
+var RocmComputeMin = 9
+
+// TODO find a better way to detect iGPU instead of minimum memory
+const IGPUMemLimit = 1 * format.GibiByte // 512M is what they typically report, so anything less than 1G must be an iGPU
+
+// Jetson devices have JETSON_JETPACK="x.y.z" factory set to the Jetpack version installed.
+// Included to drive logic for reducing Ollama-allocated overhead on L4T/Jetson devices.
+var CudaTegra string = os.Getenv("JETSON_JETPACK")
+
+// Note: gpuMutex must already be held
+func initCudaHandles() *cudaHandles {
+ // TODO - if the ollama build is CPU only, don't do these checks as they're irrelevant and confusing
+
+ cHandles := &cudaHandles{}
+ // Short Circuit if we already know which library to use
+ if nvmlLibPath != "" {
+ cHandles.nvml, _ = LoadNVMLMgmt([]string{nvmlLibPath})
+ return cHandles
+ }
+ if nvcudaLibPath != "" {
+ cHandles.deviceCount, cHandles.nvcuda, _ = LoadNVCUDAMgmt([]string{nvcudaLibPath})
+ return cHandles
+ }
+ if cudartLibPath != "" {
+ cHandles.deviceCount, cHandles.cudart, _ = LoadCUDARTMgmt([]string{cudartLibPath})
+ return cHandles
+ }
+
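+ // Discovery preference: load NVML (when available) for free-memory queries, use the nvcuda driver library for device enumeration, and fall back to cudart if the driver library can't be loaded.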
+ slog.Debug("searching for GPU discovery libraries for NVIDIA")
+ var cudartMgmtPatterns []string
+
+ // Aligned with driver, we can't carry as payloads
+ nvcudaMgmtPatterns := NvcudaGlobs
+
+ if runtime.GOOS == "windows" {
+ localAppData := os.Getenv("LOCALAPPDATA")
+ cudartMgmtPatterns = []string{filepath.Join(localAppData, "Programs", "Ollama", CudartMgmtName)}
+ }
+ tmpDir, _ := PayloadsDir()
+ if tmpDir != "" {
+ // TODO - add "payloads" for subprocess
+ cudartMgmtPatterns = []string{filepath.Join(tmpDir, "cuda*", CudartMgmtName)}
+ }
+ cudartMgmtPatterns = append(cudartMgmtPatterns, CudartGlobs...)
+
+ if len(NvmlGlobs) > 0 {
+ nvmlLibPaths := FindGPULibs(NvmlMgmtName, NvmlGlobs)
+ if len(nvmlLibPaths) > 0 {
+ nvml, libPath := LoadNVMLMgmt(nvmlLibPaths)
+ if nvml != nil {
+ slog.Debug("nvidia-ml loaded", "library", libPath)
+ cHandles.nvml = nvml
+ nvmlLibPath = libPath
+ }
+ }
+ }
+
+ nvcudaLibPaths := FindGPULibs(NvcudaMgmtName, nvcudaMgmtPatterns)
+ if len(nvcudaLibPaths) > 0 {
+ deviceCount, nvcuda, libPath := LoadNVCUDAMgmt(nvcudaLibPaths)
+ if nvcuda != nil {
+ slog.Debug("detected GPUs", "count", deviceCount, "library", libPath)
+ cHandles.nvcuda = nvcuda
+ cHandles.deviceCount = deviceCount
+ nvcudaLibPath = libPath
+ return cHandles
+ }
+ }
+
+ cudartLibPaths := FindGPULibs(CudartMgmtName, cudartMgmtPatterns)
+ if len(cudartLibPaths) > 0 {
+ deviceCount, cudart, libPath := LoadCUDARTMgmt(cudartLibPaths)
+ if cudart != nil {
+ slog.Debug("detected GPUs", "library", libPath, "count", deviceCount)
+ cHandles.cudart = cudart
+ cHandles.deviceCount = deviceCount
+ cudartLibPath = libPath
+ return cHandles
+ }
+ }
+
+ return cHandles
+}
+
+// Note: gpuMutex must already be held
+func initOneAPIHandles() *oneapiHandles {
+ oHandles := &oneapiHandles{}
+
+ // Short Circuit if we already know which library to use
+ if oneapiLibPath != "" {
+ oHandles.deviceCount, oHandles.oneapi, _ = LoadOneapiMgmt([]string{oneapiLibPath})
+ return oHandles
+ }
+
+ oneapiLibPaths := FindGPULibs(OneapiMgmtName, OneapiGlobs)
+ if len(oneapiLibPaths) > 0 {
+ oHandles.deviceCount, oHandles.oneapi, oneapiLibPath = LoadOneapiMgmt(oneapiLibPaths)
+ }
+
+ return oHandles
+}
+
+func GetCPUInfo() GpuInfoList {
+ gpuMutex.Lock()
+ if !bootstrapped {
+ gpuMutex.Unlock()
+ GetGPUInfo()
+ } else {
+ gpuMutex.Unlock()
+ }
+ return GpuInfoList{cpus[0].GpuInfo}
+}
+
+func GetGPUInfo() GpuInfoList {
+ // TODO - consider exploring lspci (and equivalent on windows) to check for
+ // GPUs so we can report warnings if we see Nvidia/AMD but fail to load the libraries
+ gpuMutex.Lock()
+ defer gpuMutex.Unlock()
+ needRefresh := true
+ var cHandles *cudaHandles
+ var oHandles *oneapiHandles
+ defer func() {
+ if cHandles != nil {
+ if cHandles.cudart != nil {
+ C.cudart_release(*cHandles.cudart)
+ }
+ if cHandles.nvcuda != nil {
+ C.nvcuda_release(*cHandles.nvcuda)
+ }
+ if cHandles.nvml != nil {
+ C.nvml_release(*cHandles.nvml)
+ }
+ }
+ if oHandles != nil {
+ if oHandles.oneapi != nil {
+ // TODO - is this needed?
+ C.oneapi_release(*oHandles.oneapi)
+ }
+ }
+ }()
+
+ if !bootstrapped {
+ slog.Info("looking for compatible GPUs")
+ needRefresh = false
+ cpuCapability = GetCPUCapability()
+ var memInfo C.mem_info_t
+
+ mem, err := GetCPUMem()
+ if err != nil {
+ slog.Warn("error looking up system memory", "error", err)
+ }
+ cpus = []CPUInfo{
+ {
+ GpuInfo: GpuInfo{
+ memInfo: mem,
+ Library: "cpu",
+ Variant: cpuCapability,
+ ID: "0",
+ },
+ },
+ }
+
+ // Fallback to CPU mode if we're lacking required vector extensions on x86
+ if cpuCapability < GPURunnerCPUCapability && runtime.GOARCH == "amd64" {
+ slog.Warn("CPU does not have minimum vector extensions, GPU inference disabled", "required", GPURunnerCPUCapability, "detected", cpuCapability)
+ bootstrapped = true
+ // No need to do any GPU discovery, since we can't run on them
+ return GpuInfoList{cpus[0].GpuInfo}
+ }
+
+ // On windows we bundle the nvidia library one level above the runner dir
+ depPath := ""
+ if runtime.GOOS == "windows" && envconfig.RunnersDir() != "" {
+ depPath = filepath.Join(filepath.Dir(envconfig.RunnersDir()), "cuda")
+ }
+
+ // Load ALL libraries
+ cHandles = initCudaHandles()
+
+ // NVIDIA
+ for i := range cHandles.deviceCount {
+ if cHandles.cudart != nil || cHandles.nvcuda != nil {
+ gpuInfo := CudaGPUInfo{
+ GpuInfo: GpuInfo{
+ Library: "cuda",
+ },
+ index: i,
+ }
+ var driverMajor int
+ var driverMinor int
+ if cHandles.cudart != nil {
+ C.cudart_bootstrap(*cHandles.cudart, C.int(i), &memInfo)
+ } else {
+ C.nvcuda_bootstrap(*cHandles.nvcuda, C.int(i), &memInfo)
+ driverMajor = int(cHandles.nvcuda.driver_major)
+ driverMinor = int(cHandles.nvcuda.driver_minor)
+ }
+ if memInfo.err != nil {
+ slog.Info("error looking up nvidia GPU memory", "error", C.GoString(memInfo.err))
+ C.free(unsafe.Pointer(memInfo.err))
+ continue
+ }
+ if memInfo.major < CudaComputeMin[0] || (memInfo.major == CudaComputeMin[0] && memInfo.minor < CudaComputeMin[1]) {
+ slog.Info(fmt.Sprintf("[%d] CUDA GPU is too old. Compute Capability detected: %d.%d", i, memInfo.major, memInfo.minor))
+ continue
+ }
+ gpuInfo.TotalMemory = uint64(memInfo.total)
+ gpuInfo.FreeMemory = uint64(memInfo.free)
+ gpuInfo.ID = C.GoString(&memInfo.gpu_id[0])
+ gpuInfo.Compute = fmt.Sprintf("%d.%d", memInfo.major, memInfo.minor)
+ gpuInfo.MinimumMemory = cudaMinimumMemory
+ gpuInfo.DependencyPath = depPath
+ gpuInfo.Name = C.GoString(&memInfo.gpu_name[0])
+ gpuInfo.DriverMajor = driverMajor
+ gpuInfo.DriverMinor = driverMinor
+
+ // query the management library as well so we can record any skew between the two
+ // which represents overhead on the GPU we must set aside on subsequent updates
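+ // For example, if nvcuda reports 7.5 GiB free but NVML reports 8.0 GiB, the 0.5 GiB difference is recorded as OSOverhead and subtracted from later NVML readings.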
+ if cHandles.nvml != nil {
+ C.nvml_get_free(*cHandles.nvml, C.int(gpuInfo.index), &memInfo.free, &memInfo.total, &memInfo.used)
+ if memInfo.err != nil {
+ slog.Warn("error looking up nvidia GPU memory", "error", C.GoString(memInfo.err))
+ C.free(unsafe.Pointer(memInfo.err))
+ } else {
+ if memInfo.free != 0 && uint64(memInfo.free) > gpuInfo.FreeMemory {
+ gpuInfo.OSOverhead = uint64(memInfo.free) - gpuInfo.FreeMemory
+ slog.Info("detected OS VRAM overhead",
+ "id", gpuInfo.ID,
+ "library", gpuInfo.Library,
+ "compute", gpuInfo.Compute,
+ "driver", fmt.Sprintf("%d.%d", gpuInfo.DriverMajor, gpuInfo.DriverMinor),
+ "name", gpuInfo.Name,
+ "overhead", format.HumanBytes2(gpuInfo.OSOverhead),
+ )
+ }
+ }
+ }
+
+ // TODO potentially sort on our own algorithm instead of what the underlying GPU library does...
+ cudaGPUs = append(cudaGPUs, gpuInfo)
+ }
+ }
+
+ // Intel
+ if envconfig.IntelGPU() {
+ oHandles = initOneAPIHandles()
+ if oHandles != nil && oHandles.oneapi != nil {
+
+ // On windows we bundle the oneapi library one level above the runner dir
+ depPath = ""
+ if runtime.GOOS == "windows" && envconfig.RunnersDir() != "" {
+ depPath = filepath.Join(filepath.Dir(envconfig.RunnersDir()), "oneapi")
+ }
+
+ for d := range oHandles.oneapi.num_drivers {
+ if oHandles.oneapi == nil {
+ // shouldn't happen
+ slog.Warn("nil oneapi handle with driver count", "count", int(oHandles.oneapi.num_drivers))
+ continue
+ }
+ devCount := C.oneapi_get_device_count(*oHandles.oneapi, C.int(d))
+ for i := range devCount {
+ gpuInfo := OneapiGPUInfo{
+ GpuInfo: GpuInfo{
+ Library: "oneapi",
+ },
+ driverIndex: int(d),
+ gpuIndex: int(i),
+ }
+ // TODO - split bootstrapping from updating free memory
+ C.oneapi_check_vram(*oHandles.oneapi, C.int(d), i, &memInfo)
+ // TODO - convert this to MinimumMemory based on testing...
+ var totalFreeMem float64 = float64(memInfo.free) * 0.95 // work-around: leave some reserve vram for mkl lib used in ggml-sycl backend.
+ memInfo.free = C.uint64_t(totalFreeMem)
+ gpuInfo.TotalMemory = uint64(memInfo.total)
+ gpuInfo.FreeMemory = uint64(memInfo.free)
+ gpuInfo.ID = C.GoString(&memInfo.gpu_id[0])
+ gpuInfo.Name = C.GoString(&memInfo.gpu_name[0])
+ gpuInfo.DependencyPath = depPath
+ oneapiGPUs = append(oneapiGPUs, gpuInfo)
+ }
+ }
+ }
+ }
+
+ rocmGPUs = AMDGetGPUInfo()
+ bootstrapped = true
+ if len(cudaGPUs) == 0 && len(rocmGPUs) == 0 && len(oneapiGPUs) == 0 {
+ slog.Info("no compatible GPUs were discovered")
+ }
+ }
+
+ // For detected GPUs, load library if not loaded
+
+ // Refresh free memory usage
+ if needRefresh {
+ mem, err := GetCPUMem()
+ if err != nil {
+ slog.Warn("error looking up system memory", "error", err)
+ } else {
+ slog.Debug("updating system memory data",
+ slog.Group(
+ "before",
+ "total", format.HumanBytes2(cpus[0].TotalMemory),
+ "free", format.HumanBytes2(cpus[0].FreeMemory),
+ "free_swap", format.HumanBytes2(cpus[0].FreeSwap),
+ ),
+ slog.Group(
+ "now",
+ "total", format.HumanBytes2(mem.TotalMemory),
+ "free", format.HumanBytes2(mem.FreeMemory),
+ "free_swap", format.HumanBytes2(mem.FreeSwap),
+ ),
+ )
+ cpus[0].FreeMemory = mem.FreeMemory
+ cpus[0].FreeSwap = mem.FreeSwap
+ }
+
+ var memInfo C.mem_info_t
+ if cHandles == nil && len(cudaGPUs) > 0 {
+ cHandles = initCudaHandles()
+ }
+ for i, gpu := range cudaGPUs {
+ if cHandles.nvml != nil {
+ C.nvml_get_free(*cHandles.nvml, C.int(gpu.index), &memInfo.free, &memInfo.total, &memInfo.used)
+ } else if cHandles.cudart != nil {
+ C.cudart_bootstrap(*cHandles.cudart, C.int(gpu.index), &memInfo)
+ } else if cHandles.nvcuda != nil {
+ C.nvcuda_get_free(*cHandles.nvcuda, C.int(gpu.index), &memInfo.free, &memInfo.total)
+ memInfo.used = memInfo.total - memInfo.free
+ } else {
+ // shouldn't happen
+ slog.Warn("no valid cuda library loaded to refresh vram usage")
+ break
+ }
+ if memInfo.err != nil {
+ slog.Warn("error looking up nvidia GPU memory", "error", C.GoString(memInfo.err))
+ C.free(unsafe.Pointer(memInfo.err))
+ continue
+ }
+ if memInfo.free == 0 {
+ slog.Warn("error looking up nvidia GPU memory")
+ continue
+ }
+ if cHandles.nvml != nil && gpu.OSOverhead > 0 {
+ // When using the management library update based on recorded overhead
+ memInfo.free -= C.uint64_t(gpu.OSOverhead)
+ }
+ slog.Debug("updating cuda memory data",
+ "gpu", gpu.ID,
+ "name", gpu.Name,
+ "overhead", format.HumanBytes2(gpu.OSOverhead),
+ slog.Group(
+ "before",
+ "total", format.HumanBytes2(gpu.TotalMemory),
+ "free", format.HumanBytes2(gpu.FreeMemory),
+ ),
+ slog.Group(
+ "now",
+ "total", format.HumanBytes2(uint64(memInfo.total)),
+ "free", format.HumanBytes2(uint64(memInfo.free)),
+ "used", format.HumanBytes2(uint64(memInfo.used)),
+ ),
+ )
+ cudaGPUs[i].FreeMemory = uint64(memInfo.free)
+ }
+
+ if oHandles == nil && len(oneapiGPUs) > 0 {
+ oHandles = initOneAPIHandles()
+ }
+ for i, gpu := range oneapiGPUs {
+ if oHandles.oneapi == nil {
+ // shouldn't happen
+ slog.Warn("nil oneapi handle with device count", "count", oHandles.deviceCount)
+ continue
+ }
+ C.oneapi_check_vram(*oHandles.oneapi, C.int(gpu.driverIndex), C.int(gpu.gpuIndex), &memInfo)
+ // TODO - convert this to MinimumMemory based on testing...
+ var totalFreeMem float64 = float64(memInfo.free) * 0.95 // work-around: leave some reserve vram for mkl lib used in ggml-sycl backend.
+ memInfo.free = C.uint64_t(totalFreeMem)
+ oneapiGPUs[i].FreeMemory = uint64(memInfo.free)
+ }
+
+ err = RocmGPUInfoList(rocmGPUs).RefreshFreeMemory()
+ if err != nil {
+ slog.Debug("problem refreshing ROCm free memory", "error", err)
+ }
+ }
+
+ resp := []GpuInfo{}
+ for _, gpu := range cudaGPUs {
+ resp = append(resp, gpu.GpuInfo)
+ }
+ for _, gpu := range rocmGPUs {
+ resp = append(resp, gpu.GpuInfo)
+ }
+ for _, gpu := range oneapiGPUs {
+ resp = append(resp, gpu.GpuInfo)
+ }
+ if len(resp) == 0 {
+ resp = append(resp, cpus[0].GpuInfo)
+ }
+ return resp
+}
+
+func FindGPULibs(baseLibName string, defaultPatterns []string) []string {
+ // Multiple GPU libraries may exist, and some may not work, so keep trying until we exhaust them
+ var ldPaths []string
+ var patterns []string
+ gpuLibPaths := []string{}
+ slog.Debug("Searching for GPU library", "name", baseLibName)
+
+ switch runtime.GOOS {
+ case "windows":
+ ldPaths = strings.Split(os.Getenv("PATH"), ";")
+ case "linux":
+ ldPaths = strings.Split(os.Getenv("LD_LIBRARY_PATH"), ":")
+ default:
+ return gpuLibPaths
+ }
+ // Start with whatever we find in the PATH/LD_LIBRARY_PATH
+ for _, ldPath := range ldPaths {
+ d, err := filepath.Abs(ldPath)
+ if err != nil {
+ continue
+ }
+ patterns = append(patterns, filepath.Join(d, baseLibName+"*"))
+ }
+ patterns = append(patterns, defaultPatterns...)
+ slog.Debug("gpu library search", "globs", patterns)
+ for _, pattern := range patterns {
+
+ // Nvidia PhysX is known to return bogus results
+ if strings.Contains(pattern, "PhysX") {
+ slog.Debug("skipping PhysX cuda library path", "path", pattern)
+ continue
+ }
+ // Ignore glob discovery errors
+ matches, _ := filepath.Glob(pattern)
+ for _, match := range matches {
+ // Resolve any links so we don't try the same lib multiple times
+ // and weed out any dups across globs
+ libPath := match
+ tmp := match
+ var err error
+ for ; err == nil; tmp, err = os.Readlink(libPath) {
+ if !filepath.IsAbs(tmp) {
+ tmp = filepath.Join(filepath.Dir(libPath), tmp)
+ }
+ libPath = tmp
+ }
+ new := true
+ for _, cmp := range gpuLibPaths {
+ if cmp == libPath {
+ new = false
+ break
+ }
+ }
+ if new {
+ gpuLibPaths = append(gpuLibPaths, libPath)
+ }
+ }
+ }
+ slog.Debug("discovered GPU libraries", "paths", gpuLibPaths)
+ return gpuLibPaths
+}
+
+func LoadCUDARTMgmt(cudartLibPaths []string) (int, *C.cudart_handle_t, string) {
+ var resp C.cudart_init_resp_t
+ resp.ch.verbose = getVerboseState()
+ for _, libPath := range cudartLibPaths {
+ lib := C.CString(libPath)
+ defer C.free(unsafe.Pointer(lib))
+ C.cudart_init(lib, &resp)
+ if resp.err != nil {
+ slog.Debug("Unable to load cudart", "library", libPath, "error", C.GoString(resp.err))
+ C.free(unsafe.Pointer(resp.err))
+ } else {
+ return int(resp.num_devices), &resp.ch, libPath
+ }
+ }
+ return 0, nil, ""
+}
+
+func LoadNVCUDAMgmt(nvcudaLibPaths []string) (int, *C.nvcuda_handle_t, string) {
+ var resp C.nvcuda_init_resp_t
+ resp.ch.verbose = getVerboseState()
+ for _, libPath := range nvcudaLibPaths {
+ lib := C.CString(libPath)
+ defer C.free(unsafe.Pointer(lib))
+ C.nvcuda_init(lib, &resp)
+ if resp.err != nil {
+ // Decide what log level based on the type of error message to help users understand why
+ msg := C.GoString(resp.err)
+ switch resp.cudaErr {
+ case C.CUDA_ERROR_INSUFFICIENT_DRIVER, C.CUDA_ERROR_SYSTEM_DRIVER_MISMATCH:
+ slog.Warn("version mismatch between driver and cuda driver library - reboot or upgrade may be required", "library", libPath, "error", msg)
+ case C.CUDA_ERROR_NO_DEVICE:
+ slog.Info("no nvidia devices detected", "library", libPath)
+ case C.CUDA_ERROR_UNKNOWN:
+ slog.Warn("unknown error initializing cuda driver library", "library", libPath, "error", msg)
+ slog.Warn("see https://github.com/ollama/ollama/blob/main/docs/troubleshooting.md for more information")
+ default:
+ if strings.Contains(msg, "wrong ELF class") {
+ slog.Debug("skipping 32bit library", "library", libPath)
+ } else {
+ slog.Info("unable to load cuda driver library", "library", libPath, "error", msg)
+ }
+ }
+ C.free(unsafe.Pointer(resp.err))
+ } else {
+ return int(resp.num_devices), &resp.ch, libPath
+ }
+ }
+ return 0, nil, ""
+}
+
+func LoadNVMLMgmt(nvmlLibPaths []string) (*C.nvml_handle_t, string) {
+ var resp C.nvml_init_resp_t
+ resp.ch.verbose = getVerboseState()
+ for _, libPath := range nvmlLibPaths {
+ lib := C.CString(libPath)
+ defer C.free(unsafe.Pointer(lib))
+ C.nvml_init(lib, &resp)
+ if resp.err != nil {
+ slog.Info(fmt.Sprintf("Unable to load NVML management library %s: %s", libPath, C.GoString(resp.err)))
+ C.free(unsafe.Pointer(resp.err))
+ } else {
+ return &resp.ch, libPath
+ }
+ }
+ return nil, ""
+}
+
+func LoadOneapiMgmt(oneapiLibPaths []string) (int, *C.oneapi_handle_t, string) {
+ var resp C.oneapi_init_resp_t
+ num_devices := 0
+ resp.oh.verbose = getVerboseState()
+ for _, libPath := range oneapiLibPaths {
+ lib := C.CString(libPath)
+ defer C.free(unsafe.Pointer(lib))
+ C.oneapi_init(lib, &resp)
+ if resp.err != nil {
+ slog.Debug("Unable to load oneAPI management library", "library", libPath, "error", C.GoString(resp.err))
+ C.free(unsafe.Pointer(resp.err))
+ } else {
+ for i := range resp.oh.num_drivers {
+ num_devices += int(C.oneapi_get_device_count(resp.oh, C.int(i)))
+ }
+ return num_devices, &resp.oh, libPath
+ }
+ }
+ return 0, nil, ""
+}
+
+func getVerboseState() C.uint16_t {
+ if envconfig.Debug() {
+ return C.uint16_t(1)
+ }
+ return C.uint16_t(0)
+}
+
+// Given the list of GPUs this instantiation is targeted for,
+// figure out the visible devices environment variable
+//
+// If different libraries are detected, the first one is what we use
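+// For example, a list of CUDA devices yields ("CUDA_VISIBLE_DEVICES", "0,1") via cudaGetVisibleDevicesEnv in cuda_common.go.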
+func (l GpuInfoList) GetVisibleDevicesEnv() (string, string) {
+ if len(l) == 0 {
+ return "", ""
+ }
+ switch l[0].Library {
+ case "cuda":
+ return cudaGetVisibleDevicesEnv(l)
+ case "rocm":
+ return rocmGetVisibleDevicesEnv(l)
+ case "oneapi":
+ return oneapiGetVisibleDevicesEnv(l)
+ default:
+ slog.Debug("no filter required for library " + l[0].Library)
+ return "", ""
+ }
+}
diff --git a/ollama/gpu/gpu_darwin.go b/ollama/gpu/gpu_darwin.go
new file mode 100755
index 0000000000000000000000000000000000000000..9d9fd84ebf47f275e9de09b7a975d6d4021ceb63
--- /dev/null
+++ b/ollama/gpu/gpu_darwin.go
@@ -0,0 +1,68 @@
+//go:build darwin
+
+package gpu
+
+/*
+#cgo CFLAGS: -x objective-c
+#cgo LDFLAGS: -framework Foundation -framework CoreGraphics -framework Metal
+#include "gpu_info_darwin.h"
+*/
+import "C"
+
+import (
+ "runtime"
+
+ "github.com/ollama/ollama/format"
+)
+
+const (
+ metalMinimumMemory = 512 * format.MebiByte
+)
+
+func GetGPUInfo() GpuInfoList {
+ mem, _ := GetCPUMem()
+ if runtime.GOARCH == "amd64" {
+ return []GpuInfo{
+ {
+ Library: "cpu",
+ Variant: GetCPUCapability(),
+ memInfo: mem,
+ },
+ }
+ }
+ info := GpuInfo{
+ Library: "metal",
+ ID: "0",
+ }
+ info.TotalMemory = uint64(C.getRecommendedMaxVRAM())
+
+ // TODO is there a way to gather actual allocated video memory? (currentAllocatedSize doesn't work)
+ info.FreeMemory = info.TotalMemory
+
+ info.MinimumMemory = metalMinimumMemory
+ return []GpuInfo{info}
+}
+
+func GetCPUInfo() GpuInfoList {
+ mem, _ := GetCPUMem()
+ return []GpuInfo{
+ {
+ Library: "cpu",
+ Variant: GetCPUCapability(),
+ memInfo: mem,
+ },
+ }
+}
+
+func GetCPUMem() (memInfo, error) {
+ return memInfo{
+ TotalMemory: uint64(C.getPhysicalMemory()),
+ FreeMemory: uint64(C.getFreeMemory()),
+ // FreeSwap omitted as Darwin uses dynamic paging
+ }, nil
+}
+
+func (l GpuInfoList) GetVisibleDevicesEnv() (string, string) {
+ // No-op on darwin
+ return "", ""
+}
diff --git a/ollama/gpu/gpu_info.h b/ollama/gpu/gpu_info.h
new file mode 100755
index 0000000000000000000000000000000000000000..094b791a88595cc10c565400348e749c247f9e81
--- /dev/null
+++ b/ollama/gpu/gpu_info.h
@@ -0,0 +1,70 @@
+#ifndef __APPLE__
+#ifndef __GPU_INFO_H__
+#define __GPU_INFO_H__
+#include <stdint.h>
+#include <stdio.h>
+#include <string.h>
+
+#ifndef _WIN32
+#include <dlfcn.h>
+#define LOAD_LIBRARY(lib, flags) dlopen(lib, flags)
+#define LOAD_SYMBOL(handle, sym) dlsym(handle, sym)
+#define LOAD_ERR() strdup(dlerror())
+#define UNLOAD_LIBRARY(handle) dlclose(handle)
+#else
+#include <windows.h>
+#define LOAD_LIBRARY(lib, flags) LoadLibrary(lib)
+#define LOAD_SYMBOL(handle, sym) GetProcAddress(handle, sym)
+#define UNLOAD_LIBRARY(handle) FreeLibrary(handle)
+#define LOAD_ERR() ({\
+ LPSTR messageBuffer = NULL; \
+ size_t size = FormatMessageA(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, \
+ NULL, GetLastError(), MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), (LPSTR)&messageBuffer, 0, NULL); \
+ char *resp = strdup(messageBuffer); \
+ LocalFree(messageBuffer); \
+ resp; \
+})
+
+#endif
+
+#define LOG(verbose, ...) \
+ do { \
+ if (verbose) { \
+ fprintf(stderr, __VA_ARGS__); \
+ } \
+ } while (0)
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define GPU_ID_LEN 64
+#define GPU_NAME_LEN 96
+
+typedef struct mem_info {
+ char *err; // If non-null, caller responsible for freeing
+ char gpu_id[GPU_ID_LEN];
+ char gpu_name[GPU_NAME_LEN];
+ uint64_t total;
+ uint64_t free;
+ uint64_t used;
+
+ // Compute Capability
+ int major;
+ int minor;
+ int patch;
+} mem_info_t;
+
+void cpu_check_ram(mem_info_t *resp);
+
+#ifdef __cplusplus
+}
+#endif
+
+#include "gpu_info_cudart.h"
+#include "gpu_info_nvcuda.h"
+#include "gpu_info_nvml.h"
+#include "gpu_info_oneapi.h"
+
+#endif // __GPU_INFO_H__
+#endif // __APPLE__
diff --git a/ollama/gpu/gpu_info_cudart.c b/ollama/gpu/gpu_info_cudart.c
new file mode 100755
index 0000000000000000000000000000000000000000..03f15a2c3e09859d3a8b080efc1e90ec32b9056e
--- /dev/null
+++ b/ollama/gpu/gpu_info_cudart.c
@@ -0,0 +1,183 @@
+#ifndef __APPLE__ // TODO - maybe consider nvidia support on intel macs?
+
+#include <string.h>
+#include "gpu_info_cudart.h"
+
+void cudart_init(char *cudart_lib_path, cudart_init_resp_t *resp) {
+ cudartReturn_t ret;
+ resp->err = NULL;
+ resp->num_devices = 0;
+ const int buflen = 256;
+ char buf[buflen + 1];
+ int i;
+
+ struct lookup {
+ char *s;
+ void **p;
+ } l[] = {
+ {"cudaSetDevice", (void *)&resp->ch.cudaSetDevice},
+ {"cudaDeviceSynchronize", (void *)&resp->ch.cudaDeviceSynchronize},
+ {"cudaDeviceReset", (void *)&resp->ch.cudaDeviceReset},
+ {"cudaMemGetInfo", (void *)&resp->ch.cudaMemGetInfo},
+ {"cudaGetDeviceCount", (void *)&resp->ch.cudaGetDeviceCount},
+ {"cudaDeviceGetAttribute", (void *)&resp->ch.cudaDeviceGetAttribute},
+ {"cudaDriverGetVersion", (void *)&resp->ch.cudaDriverGetVersion},
+ {"cudaGetDeviceProperties", (void *)&resp->ch.cudaGetDeviceProperties},
+ {NULL, NULL},
+ };
+
+ resp->ch.handle = LOAD_LIBRARY(cudart_lib_path, RTLD_LAZY);
+ if (!resp->ch.handle) {
+ char *msg = LOAD_ERR();
+ LOG(resp->ch.verbose, "library %s load err: %s\n", cudart_lib_path, msg);
+ snprintf(buf, buflen,
+ "Unable to load %s library to query for Nvidia GPUs: %s",
+ cudart_lib_path, msg);
+ free(msg);
+ resp->err = strdup(buf);
+ return;
+ }
+
+ for (i = 0; l[i].s != NULL; i++) {
+ *l[i].p = LOAD_SYMBOL(resp->ch.handle, l[i].s);
+ if (!*(l[i].p)) {
+ char *msg = LOAD_ERR();
+ LOG(resp->ch.verbose, "dlerr: %s\n", msg);
+ UNLOAD_LIBRARY(resp->ch.handle);
+ resp->ch.handle = NULL;
+ snprintf(buf, buflen, "symbol lookup for %s failed: %s", l[i].s,
+ msg);
+ free(msg);
+ resp->err = strdup(buf);
+ return;
+ }
+ }
+
+ ret = (*resp->ch.cudaSetDevice)(0);
+ if (ret != CUDART_SUCCESS) {
+ LOG(resp->ch.verbose, "cudaSetDevice err: %d\n", ret);
+ UNLOAD_LIBRARY(resp->ch.handle);
+ resp->ch.handle = NULL;
+ if (ret == CUDA_ERROR_INSUFFICIENT_DRIVER) {
+ resp->err = strdup("your nvidia driver is too old or missing. If you have a CUDA GPU please upgrade to run ollama");
+ return;
+ }
+ snprintf(buf, buflen, "cudart init failure: %d", ret);
+ resp->err = strdup(buf);
+ return;
+ }
+
+ int version = 0;
+ cudartDriverVersion_t driverVersion;
+ driverVersion.major = 0;
+ driverVersion.minor = 0;
+
+ // Report driver version if we're in verbose mode, ignore errors
+ ret = (*resp->ch.cudaDriverGetVersion)(&version);
+ if (ret != CUDART_SUCCESS) {
+ LOG(resp->ch.verbose, "cudaDriverGetVersion failed: %d\n", ret);
+ } else {
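+ // The runtime packs the version as major*1000 + minor*10, so e.g. 12020 decodes to 12.2.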
+ driverVersion.major = version / 1000;
+ driverVersion.minor = (version - (driverVersion.major * 1000)) / 10;
+ LOG(resp->ch.verbose, "CUDA driver version: %d-%d\n", driverVersion.major, driverVersion.minor);
+ }
+
+ ret = (*resp->ch.cudaGetDeviceCount)(&resp->num_devices);
+ if (ret != CUDART_SUCCESS) {
+ LOG(resp->ch.verbose, "cudaGetDeviceCount err: %d\n", ret);
+ UNLOAD_LIBRARY(resp->ch.handle);
+ resp->ch.handle = NULL;
+ snprintf(buf, buflen, "unable to get device count: %d", ret);
+ resp->err = strdup(buf);
+ return;
+ }
+}
+
+
+void cudart_bootstrap(cudart_handle_t h, int i, mem_info_t *resp) {
+ resp->err = NULL;
+ cudartMemory_t memInfo = {0,0,0};
+ cudartReturn_t ret;
+ const int buflen = 256;
+ char buf[buflen + 1];
+
+ if (h.handle == NULL) {
+ resp->err = strdup("cudart handle isn't initialized");
+ return;
+ }
+
+ ret = (*h.cudaSetDevice)(i);
+ if (ret != CUDART_SUCCESS) {
+ snprintf(buf, buflen, "cudart device failed to initialize");
+ resp->err = strdup(buf);
+ return;
+ }
+
+ cudaDeviceProp_t props;
+ ret = (*h.cudaGetDeviceProperties)(&props, i);
+ if (ret != CUDART_SUCCESS) {
+ LOG(h.verbose, "[%d] device properties lookup failure: %d\n", i, ret);
+ snprintf(&resp->gpu_id[0], GPU_ID_LEN, "%d", i);
+ resp->major = 0;
+ resp->minor = 0;
+ } else {
+ int allNull = 1;
+ for (int j = 0; j < 16; j++) {
+ if (props.uuid.bytes[j] != 0) {
+ allNull = 0;
+ break;
+ }
+ }
+ if (allNull != 0) {
+ snprintf(&resp->gpu_id[0], GPU_ID_LEN, "%d", i);
+ } else {
+ // GPU-d110a105-ac29-1d54-7b49-9c90440f215b
+ snprintf(&resp->gpu_id[0], GPU_ID_LEN,
+ "GPU-%02x%02x%02x%02x-%02x%02x-%02x%02x-%02x%02x-%02x%02x%02x%02x%02x%02x",
+ props.uuid.bytes[0],
+ props.uuid.bytes[1],
+ props.uuid.bytes[2],
+ props.uuid.bytes[3],
+ props.uuid.bytes[4],
+ props.uuid.bytes[5],
+ props.uuid.bytes[6],
+ props.uuid.bytes[7],
+ props.uuid.bytes[8],
+ props.uuid.bytes[9],
+ props.uuid.bytes[10],
+ props.uuid.bytes[11],
+ props.uuid.bytes[12],
+ props.uuid.bytes[13],
+ props.uuid.bytes[14],
+ props.uuid.bytes[15]
+ );
+ }
+ resp->major = props.major;
+ resp->minor = props.minor;
+
+ // TODO add other useful properties from props
+ }
+ ret = (*h.cudaMemGetInfo)(&memInfo.free, &memInfo.total);
+ if (ret != CUDART_SUCCESS) {
+ snprintf(buf, buflen, "cudart device memory info lookup failure %d", ret);
+ resp->err = strdup(buf);
+ return;
+ }
+
+ resp->total = memInfo.total;
+ resp->free = memInfo.free;
+ resp->used = memInfo.used;
+
+ LOG(h.verbose, "[%s] CUDA totalMem %lu\n", resp->gpu_id, resp->total);
+ LOG(h.verbose, "[%s] CUDA freeMem %lu\n", resp->gpu_id, resp->free);
+ LOG(h.verbose, "[%s] CUDA usedMem %lu\n", resp->gpu_id, resp->used);
+ LOG(h.verbose, "[%s] Compute Capability %d.%d\n", resp->gpu_id, resp->major, resp->minor);
+}
+
+void cudart_release(cudart_handle_t h) {
+ LOG(h.verbose, "releasing cudart library\n");
+ UNLOAD_LIBRARY(h.handle);
+ h.handle = NULL;
+}
+
+#endif // __APPLE__
\ No newline at end of file
diff --git a/ollama/gpu/gpu_info_cudart.h b/ollama/gpu/gpu_info_cudart.h
new file mode 100755
index 0000000000000000000000000000000000000000..ff0c0af19f993ab5a4d0731a3c1d7df33958a3fb
--- /dev/null
+++ b/ollama/gpu/gpu_info_cudart.h
@@ -0,0 +1,148 @@
+#ifndef __APPLE__
+#ifndef __GPU_INFO_CUDART_H__
+#define __GPU_INFO_CUDART_H__
+#include "gpu_info.h"
+
+// Just enough typedef's to dlopen/dlsym for memory information
+typedef enum cudartReturn_enum {
+ CUDART_SUCCESS = 0,
+ CUDART_ERROR_INVALID_VALUE = 1,
+ CUDART_ERROR_MEMORY_ALLOCATION = 2,
+ CUDART_ERROR_INSUFFICIENT_DRIVER = 35,
+ // Other values omitted for now...
+} cudartReturn_t;
+
+typedef enum cudartDeviceAttr_enum {
+ cudartDevAttrComputeCapabilityMajor = 75,
+ cudartDevAttrComputeCapabilityMinor = 76,
+
+ // TODO - not yet wired up but may be useful for Jetson or other
+ // integrated GPU scenarios with shared memory
+ cudaDevAttrIntegrated = 18
+
+} cudartDeviceAttr_t;
+
+typedef void *cudartDevice_t; // Opaque is sufficient
+typedef struct cudartMemory_st {
+ size_t total;
+ size_t free;
+ size_t used;
+} cudartMemory_t;
+
+typedef struct cudartDriverVersion {
+ int major;
+ int minor;
+} cudartDriverVersion_t;
+
+typedef struct cudaUUID {
+ unsigned char bytes[16];
+} cudaUUID_t;
+typedef struct cudaDeviceProp {
+ char name[256]; /**< ASCII string identifying device */
+ cudaUUID_t uuid; /**< 16-byte unique identifier */
+ char luid[8]; /**< 8-byte locally unique identifier. Value is undefined on TCC and non-Windows platforms */
+ unsigned int luidDeviceNodeMask; /**< LUID device node mask. Value is undefined on TCC and non-Windows platforms */
+ size_t totalGlobalMem; /**< Global memory available on device in bytes */
+ size_t sharedMemPerBlock; /**< Shared memory available per block in bytes */
+ int regsPerBlock; /**< 32-bit registers available per block */
+ int warpSize; /**< Warp size in threads */
+ size_t memPitch; /**< Maximum pitch in bytes allowed by memory copies */
+ int maxThreadsPerBlock; /**< Maximum number of threads per block */
+ int maxThreadsDim[3]; /**< Maximum size of each dimension of a block */
+ int maxGridSize[3]; /**< Maximum size of each dimension of a grid */
+ int clockRate; /**< Clock frequency in kilohertz */
+ size_t totalConstMem; /**< Constant memory available on device in bytes */
+ int major; /**< Major compute capability */
+ int minor; /**< Minor compute capability */
+ size_t textureAlignment; /**< Alignment requirement for textures */
+ size_t texturePitchAlignment; /**< Pitch alignment requirement for texture references bound to pitched memory */
+ int deviceOverlap; /**< Device can concurrently copy memory and execute a kernel. Deprecated. Use instead asyncEngineCount. */
+ int multiProcessorCount; /**< Number of multiprocessors on device */
+ int kernelExecTimeoutEnabled; /**< Specified whether there is a run time limit on kernels */
+ int integrated; /**< Device is integrated as opposed to discrete */
+ int canMapHostMemory; /**< Device can map host memory with cudaHostAlloc/cudaHostGetDevicePointer */
+ int computeMode; /**< Compute mode (See ::cudaComputeMode) */
+ int maxTexture1D; /**< Maximum 1D texture size */
+ int maxTexture1DMipmap; /**< Maximum 1D mipmapped texture size */
+ int maxTexture1DLinear; /**< Deprecated, do not use. Use cudaDeviceGetTexture1DLinearMaxWidth() or cuDeviceGetTexture1DLinearMaxWidth() instead. */
+ int maxTexture2D[2]; /**< Maximum 2D texture dimensions */
+ int maxTexture2DMipmap[2]; /**< Maximum 2D mipmapped texture dimensions */
+ int maxTexture2DLinear[3]; /**< Maximum dimensions (width, height, pitch) for 2D textures bound to pitched memory */
+ int maxTexture2DGather[2]; /**< Maximum 2D texture dimensions if texture gather operations have to be performed */
+ int maxTexture3D[3]; /**< Maximum 3D texture dimensions */
+ int maxTexture3DAlt[3]; /**< Maximum alternate 3D texture dimensions */
+ int maxTextureCubemap; /**< Maximum Cubemap texture dimensions */
+ int maxTexture1DLayered[2]; /**< Maximum 1D layered texture dimensions */
+ int maxTexture2DLayered[3]; /**< Maximum 2D layered texture dimensions */
+ int maxTextureCubemapLayered[2];/**< Maximum Cubemap layered texture dimensions */
+ int maxSurface1D; /**< Maximum 1D surface size */
+ int maxSurface2D[2]; /**< Maximum 2D surface dimensions */
+ int maxSurface3D[3]; /**< Maximum 3D surface dimensions */
+ int maxSurface1DLayered[2]; /**< Maximum 1D layered surface dimensions */
+ int maxSurface2DLayered[3]; /**< Maximum 2D layered surface dimensions */
+ int maxSurfaceCubemap; /**< Maximum Cubemap surface dimensions */
+ int maxSurfaceCubemapLayered[2];/**< Maximum Cubemap layered surface dimensions */
+ size_t surfaceAlignment; /**< Alignment requirements for surfaces */
+ int concurrentKernels; /**< Device can possibly execute multiple kernels concurrently */
+ int ECCEnabled; /**< Device has ECC support enabled */
+ int pciBusID; /**< PCI bus ID of the device */
+ int pciDeviceID; /**< PCI device ID of the device */
+ int pciDomainID; /**< PCI domain ID of the device */
+ int tccDriver; /**< 1 if device is a Tesla device using TCC driver, 0 otherwise */
+ int asyncEngineCount; /**< Number of asynchronous engines */
+ int unifiedAddressing; /**< Device shares a unified address space with the host */
+ int memoryClockRate; /**< Peak memory clock frequency in kilohertz */
+ int memoryBusWidth; /**< Global memory bus width in bits */
+ int l2CacheSize; /**< Size of L2 cache in bytes */
+ int persistingL2CacheMaxSize; /**< Device's maximum l2 persisting lines capacity setting in bytes */
+ int maxThreadsPerMultiProcessor;/**< Maximum resident threads per multiprocessor */
+ int streamPrioritiesSupported; /**< Device supports stream priorities */
+ int globalL1CacheSupported; /**< Device supports caching globals in L1 */
+ int localL1CacheSupported; /**< Device supports caching locals in L1 */
+ size_t sharedMemPerMultiprocessor; /**< Shared memory available per multiprocessor in bytes */
+ int regsPerMultiprocessor; /**< 32-bit registers available per multiprocessor */
+ int managedMemory; /**< Device supports allocating managed memory on this system */
+ int isMultiGpuBoard; /**< Device is on a multi-GPU board */
+ int multiGpuBoardGroupID; /**< Unique identifier for a group of devices on the same multi-GPU board */
+ int hostNativeAtomicSupported; /**< Link between the device and the host supports native atomic operations */
+ int singleToDoublePrecisionPerfRatio; /**< Ratio of single precision performance (in floating-point operations per second) to double precision performance */
+ int pageableMemoryAccess; /**< Device supports coherently accessing pageable memory without calling cudaHostRegister on it */
+ int concurrentManagedAccess; /**< Device can coherently access managed memory concurrently with the CPU */
+ int computePreemptionSupported; /**< Device supports Compute Preemption */
+ int canUseHostPointerForRegisteredMem; /**< Device can access host registered memory at the same virtual address as the CPU */
+ int cooperativeLaunch; /**< Device supports launching cooperative kernels via ::cudaLaunchCooperativeKernel */
+ int cooperativeMultiDeviceLaunch; /**< Deprecated, cudaLaunchCooperativeKernelMultiDevice is deprecated. */
+ size_t sharedMemPerBlockOptin; /**< Per device maximum shared memory per block usable by special opt in */
+ int pageableMemoryAccessUsesHostPageTables; /**< Device accesses pageable memory via the host's page tables */
+ int directManagedMemAccessFromHost; /**< Host can directly access managed memory on the device without migration. */
+ int maxBlocksPerMultiProcessor; /**< Maximum number of resident blocks per multiprocessor */
+ int accessPolicyMaxWindowSize; /**< The maximum value of ::cudaAccessPolicyWindow::num_bytes. */
+ size_t reservedSharedMemPerBlock; /**< Shared memory reserved by CUDA driver per block in bytes */
+ } cudaDeviceProp_t;
+
+typedef struct cudart_handle {
+ void *handle;
+ uint16_t verbose;
+ cudartReturn_t (*cudaSetDevice)(int device);
+ cudartReturn_t (*cudaDeviceSynchronize)(void);
+ cudartReturn_t (*cudaDeviceReset)(void);
+ cudartReturn_t (*cudaMemGetInfo)(size_t *, size_t *);
+ cudartReturn_t (*cudaGetDeviceCount)(int *);
+ cudartReturn_t (*cudaDeviceGetAttribute)(int* value, cudartDeviceAttr_t attr, int device);
+ cudartReturn_t (*cudaDriverGetVersion) (int *driverVersion);
+ cudartReturn_t (*cudaGetDeviceProperties) (cudaDeviceProp_t* prop, int device);
+} cudart_handle_t;
+
+typedef struct cudart_init_resp {
+ char *err; // If err is non-null handle is invalid
+ cudart_handle_t ch;
+ int num_devices;
+} cudart_init_resp_t;
+
+void cudart_init(char *cudart_lib_path, cudart_init_resp_t *resp);
+void cudart_bootstrap(cudart_handle_t ch, int device_id, mem_info_t *resp);
+// TODO - if we keep this library longer term, add cudart_get_free
+void cudart_release(cudart_handle_t ch);
+
+#endif // __GPU_INFO_CUDART_H__
+#endif // __APPLE__
diff --git a/ollama/gpu/gpu_info_darwin.h b/ollama/gpu/gpu_info_darwin.h
new file mode 100755
index 0000000000000000000000000000000000000000..415e7922d7dbaa34e77124c876ac4889edd878df
--- /dev/null
+++ b/ollama/gpu/gpu_info_darwin.h
@@ -0,0 +1,5 @@
+#import <Metal/Metal.h>
+#include <stdint.h>
+uint64_t getRecommendedMaxVRAM();
+uint64_t getPhysicalMemory();
+uint64_t getFreeMemory();
diff --git a/ollama/gpu/gpu_info_darwin.m b/ollama/gpu/gpu_info_darwin.m
new file mode 100755
index 0000000000000000000000000000000000000000..5ca139e0b215eb779114b332859d3e3494d0f9c5
--- /dev/null
+++ b/ollama/gpu/gpu_info_darwin.m
@@ -0,0 +1,35 @@
+#import <Foundation/Foundation.h>
+#import <mach/mach.h>
+#include "gpu_info_darwin.h"
+
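+// getRecommendedMaxVRAM returns Metal's recommendedMaxWorkingSetSize for the default device, which gpu_darwin.go reports as the GPU's total memory.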
+uint64_t getRecommendedMaxVRAM() {
+ id<MTLDevice> device = MTLCreateSystemDefaultDevice();
+ uint64_t result = device.recommendedMaxWorkingSetSize;
+ CFRelease(device);
+ return result;
+}
+
+// getPhysicalMemory returns the total physical memory in bytes
+uint64_t getPhysicalMemory() {
+ return [NSProcessInfo processInfo].physicalMemory;
+}
+
+// getFreeMemory returns the total free memory in bytes, including inactive
+// memory that can be reclaimed by the system.
+uint64_t getFreeMemory() {
+ mach_port_t host_port = mach_host_self();
+ mach_msg_type_number_t host_size = sizeof(vm_statistics64_data_t) / sizeof(integer_t);
+ vm_size_t pagesize;
+ vm_statistics64_data_t vm_stat;
+
+ host_page_size(host_port, &pagesize);
+ if (host_statistics64(host_port, HOST_VM_INFO64, (host_info64_t)&vm_stat, &host_size) != KERN_SUCCESS) {
+ return 0;
+ }
+
+ uint64_t free_memory = (uint64_t)vm_stat.free_count * pagesize;
+ free_memory += (uint64_t)vm_stat.speculative_count * pagesize;
+ free_memory += (uint64_t)vm_stat.inactive_count * pagesize;
+
+ return free_memory;
+}
diff --git a/ollama/gpu/gpu_info_nvcuda.c b/ollama/gpu/gpu_info_nvcuda.c
new file mode 100755
index 0000000000000000000000000000000000000000..a1a38bfc27215a132120d6f4319665101289b747
--- /dev/null
+++ b/ollama/gpu/gpu_info_nvcuda.c
@@ -0,0 +1,243 @@
+#ifndef __APPLE__ // TODO - maybe consider nvidia support on intel macs?
+
+#include <string.h>
+#include "gpu_info_nvcuda.h"
+
+void nvcuda_init(char *nvcuda_lib_path, nvcuda_init_resp_t *resp) {
+ CUresult ret;
+ resp->err = NULL;
+ resp->num_devices = 0;
+ resp->cudaErr = CUDA_SUCCESS;
+ const int buflen = 256;
+ char buf[buflen + 1];
+ int i;
+
+ struct lookup {
+ char *s;
+ void **p;
+ } l[] = {
+
+ {"cuInit", (void *)&resp->ch.cuInit},
+ {"cuDriverGetVersion", (void *)&resp->ch.cuDriverGetVersion},
+ {"cuDeviceGetCount", (void *)&resp->ch.cuDeviceGetCount},
+ {"cuDeviceGet", (void *)&resp->ch.cuDeviceGet},
+ {"cuDeviceGetAttribute", (void *)&resp->ch.cuDeviceGetAttribute},
+ {"cuDeviceGetUuid", (void *)&resp->ch.cuDeviceGetUuid},
+ {"cuDeviceGetName", (void *)&resp->ch.cuDeviceGetName},
+ {"cuCtxCreate_v3", (void *)&resp->ch.cuCtxCreate_v3},
+ {"cuMemGetInfo_v2", (void *)&resp->ch.cuMemGetInfo_v2},
+ {"cuCtxDestroy", (void *)&resp->ch.cuCtxDestroy},
+ {NULL, NULL},
+ };
+
+ resp->ch.handle = LOAD_LIBRARY(nvcuda_lib_path, RTLD_LAZY);
+ if (!resp->ch.handle) {
+ char *msg = LOAD_ERR();
+ LOG(resp->ch.verbose, "library %s load err: %s\n", nvcuda_lib_path, msg);
+ snprintf(buf, buflen,
+ "Unable to load %s library to query for Nvidia GPUs: %s",
+ nvcuda_lib_path, msg);
+ free(msg);
+ resp->err = strdup(buf);
+ resp->cudaErr = -1;
+ return;
+ }
+
+ for (i = 0; l[i].s != NULL; i++) {
+ *l[i].p = LOAD_SYMBOL(resp->ch.handle, l[i].s);
+ if (!*(l[i].p)) {
+ char *msg = LOAD_ERR();
+ LOG(resp->ch.verbose, "dlerr: %s\n", msg);
+ UNLOAD_LIBRARY(resp->ch.handle);
+ resp->ch.handle = NULL;
+ snprintf(buf, buflen, "symbol lookup for %s failed: %s", l[i].s,
+ msg);
+ free(msg);
+ resp->err = strdup(buf);
+ resp->cudaErr = -1;
+ return;
+ }
+ }
+
+ ret = (*resp->ch.cuInit)(0);
+ if (ret != CUDA_SUCCESS) {
+ LOG(resp->ch.verbose, "cuInit err: %d\n", ret);
+ UNLOAD_LIBRARY(resp->ch.handle);
+ resp->ch.handle = NULL;
+ snprintf(buf, buflen, "cuda driver library init failure: %d", ret);
+ resp->err = strdup(buf);
+ resp->cudaErr = ret;
+ return;
+ }
+
+ int version = 0;
+ resp->ch.driver_major = 0;
+ resp->ch.driver_minor = 0;
+
+ // Report driver version if we're in verbose mode, ignore errors
+ ret = (*resp->ch.cuDriverGetVersion)(&version);
+ if (ret != CUDA_SUCCESS) {
+ LOG(resp->ch.verbose, "cuDriverGetVersion failed: %d\n", ret);
+ } else {
+ resp->ch.driver_major = version / 1000;
+ resp->ch.driver_minor = (version - (resp->ch.driver_major * 1000)) / 10;
+ LOG(resp->ch.verbose, "CUDA driver version: %d.%d\n", resp->ch.driver_major, resp->ch.driver_minor);
+ }
+
+ ret = (*resp->ch.cuDeviceGetCount)(&resp->num_devices);
+ if (ret != CUDA_SUCCESS) {
+ LOG(resp->ch.verbose, "cuDeviceGetCount err: %d\n", ret);
+ UNLOAD_LIBRARY(resp->ch.handle);
+ resp->ch.handle = NULL;
+ snprintf(buf, buflen, "unable to get device count: %d", ret);
+ resp->err = strdup(buf);
+ resp->cudaErr = ret;
+ return;
+ }
+}
+
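+// Error-message buffer length shared by nvcuda_bootstrap below.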
+const int buflen = 256;
+void nvcuda_bootstrap(nvcuda_handle_t h, int i, mem_info_t *resp) {
+ resp->err = NULL;
+ nvcudaMemory_t memInfo = {0,0};
+ CUresult ret;
+ CUdevice device = -1;
+ CUcontext ctx = NULL;
+ char buf[buflen + 1];
+ CUuuid uuid = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0};
+
+ if (h.handle == NULL) {
+ resp->err = strdup("cuda driver library handle isn't initialized");
+ return;
+ }
+
+ ret = (*h.cuDeviceGet)(&device, i);
+ if (ret != CUDA_SUCCESS) {
+ snprintf(buf, buflen, "cuda driver library device failed to initialize");
+ resp->err = strdup(buf);
+ return;
+ }
+
+ int major = 0;
+ int minor = 0;
+ ret = (*h.cuDeviceGetAttribute)(&major, CU_DEVICE_ATTRIBUTE_COMPUTE_CAPABILITY_MAJOR, device);
+ if (ret != CUDA_SUCCESS) {
+ LOG(h.verbose, "[%d] device major lookup failure: %d\n", i, ret);
+ } else {
+ ret = (*h.cuDeviceGetAttribute)(&minor, CU_DEVICE_ATTRIBUTE_COMPUTE_CAPABILITY_MINOR, device);
+ if (ret != CUDA_SUCCESS) {
+ LOG(h.verbose, "[%d] device minor lookup failure: %d\n", i, ret);
+ } else {
+ resp->minor = minor;
+ resp->major = major;
+ }
+ }
+
+ ret = (*h.cuDeviceGetUuid)(&uuid, device);
+ if (ret != CUDA_SUCCESS) {
+ LOG(h.verbose, "[%d] device uuid lookup failure: %d\n", i, ret);
+ snprintf(&resp->gpu_id[0], GPU_ID_LEN, "%d", i);
+ } else {
+ // GPU-d110a105-ac29-1d54-7b49-9c90440f215b
+ snprintf(&resp->gpu_id[0], GPU_ID_LEN,
+ "GPU-%02x%02x%02x%02x-%02x%02x-%02x%02x-%02x%02x-%02x%02x%02x%02x%02x%02x",
+ uuid.bytes[0],
+ uuid.bytes[1],
+ uuid.bytes[2],
+ uuid.bytes[3],
+ uuid.bytes[4],
+ uuid.bytes[5],
+ uuid.bytes[6],
+ uuid.bytes[7],
+ uuid.bytes[8],
+ uuid.bytes[9],
+ uuid.bytes[10],
+ uuid.bytes[11],
+ uuid.bytes[12],
+ uuid.bytes[13],
+ uuid.bytes[14],
+ uuid.bytes[15]
+ );
+ }
+
+ ret = (*h.cuDeviceGetName)(&resp->gpu_name[0], GPU_NAME_LEN, device);
+ if (ret != CUDA_SUCCESS) {
+ LOG(h.verbose, "[%d] device name lookup failure: %d\n", i, ret);
+ resp->gpu_name[0] = '\0';
+ }
+
+ // To get memory we have to set (and release) a context
+ ret = (*h.cuCtxCreate_v3)(&ctx, NULL, 0, 0, device);
+ if (ret != CUDA_SUCCESS) {
+ snprintf(buf, buflen, "cuda driver library failed to get device context %d", ret);
+ resp->err = strdup(buf);
+ return;
+ }
+
+ ret = (*h.cuMemGetInfo_v2)(&memInfo.free, &memInfo.total);
+ if (ret != CUDA_SUCCESS) {
+ snprintf(buf, buflen, "cuda driver library device memory info lookup failure %d", ret);
+ resp->err = strdup(buf);
+ // Best effort on failure...
+ (*h.cuCtxDestroy)(ctx);
+ return;
+ }
+
+ resp->total = memInfo.total;
+ resp->free = memInfo.free;
+
+ LOG(h.verbose, "[%s] CUDA totalMem %lu mb\n", resp->gpu_id, resp->total / 1024 / 1024);
+ LOG(h.verbose, "[%s] CUDA freeMem %lu mb\n", resp->gpu_id, resp->free / 1024 / 1024);
+ LOG(h.verbose, "[%s] Compute Capability %d.%d\n", resp->gpu_id, resp->major, resp->minor);
+
+ ret = (*h.cuCtxDestroy)(ctx);
+ if (ret != CUDA_SUCCESS) {
+ LOG(1, "cuda driver library failed to release device context %d", ret);
+ }
+}
+
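+// nvcuda_get_free refreshes the free and total VRAM for device i by creating a
+// temporary CUDA context and querying cuMemGetInfo.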
+void nvcuda_get_free(nvcuda_handle_t h, int i, uint64_t *free, uint64_t *total) {
+ CUresult ret;
+ CUcontext ctx = NULL;
+ CUdevice device = -1;
+ *free = 0;
+ *total = 0;
+
+ ret = (*h.cuDeviceGet)(&device, i);
+ if (ret != CUDA_SUCCESS) {
+ LOG(1, "cuda driver library device failed to initialize");
+ return;
+ }
+
+ // To get memory we have to set (and release) a context
+ ret = (*h.cuCtxCreate_v3)(&ctx, NULL, 0, 0, device);
+ if (ret != CUDA_SUCCESS) {
+ LOG(1, "cuda driver library failed to get device context %d", ret);
+ return;
+ }
+
+ ret = (*h.cuMemGetInfo_v2)(free, total);
+ if (ret != CUDA_SUCCESS) {
+ LOG(1, "cuda driver library device memory info lookup failure %d", ret);
+ // Best effort on failure...
+ (*h.cuCtxDestroy)(ctx);
+ return;
+ }
+
+ ret = (*h.cuCtxDestroy)(ctx);
+ if (ret != CUDA_SUCCESS) {
+ LOG(1, "cuda driver library failed to release device context %d", ret);
+ }
+}
+
+void nvcuda_release(nvcuda_handle_t h) {
+ LOG(h.verbose, "releasing cuda driver library\n");
+ UNLOAD_LIBRARY(h.handle);
+ // TODO and other context release logic?
+ h.handle = NULL;
+}
+
+#endif // __APPLE__
\ No newline at end of file
diff --git a/ollama/gpu/gpu_info_nvcuda.h b/ollama/gpu/gpu_info_nvcuda.h
new file mode 100755
index 0000000000000000000000000000000000000000..ef2fe8a305117d2c5c5260b759ab1373b07d25fc
--- /dev/null
+++ b/ollama/gpu/gpu_info_nvcuda.h
@@ -0,0 +1,79 @@
+#ifndef __APPLE__
+#ifndef __GPU_INFO_NVCUDA_H__
+#define __GPU_INFO_NVCUDA_H__
+#include "gpu_info.h"
+
+// Just enough typedef's to dlopen/dlsym for memory information
+typedef enum cudaError_enum {
+ CUDA_SUCCESS = 0,
+ CUDA_ERROR_INVALID_VALUE = 1,
+ CUDA_ERROR_OUT_OF_MEMORY = 2,
+ CUDA_ERROR_NOT_INITIALIZED = 3,
+ CUDA_ERROR_INSUFFICIENT_DRIVER = 35,
+ CUDA_ERROR_NO_DEVICE = 100,
+ CUDA_ERROR_SYSTEM_DRIVER_MISMATCH = 803,
+ CUDA_ERROR_UNKNOWN = 999,
+ // Other values omitted for now...
+} CUresult;
+
+typedef enum CUdevice_attribute_enum {
+ CU_DEVICE_ATTRIBUTE_COMPUTE_CAPABILITY_MAJOR = 75,
+ CU_DEVICE_ATTRIBUTE_COMPUTE_CAPABILITY_MINOR = 76,
+
+ // TODO - not yet wired up but may be useful for Jetson or other
+ // integrated GPU scenarios with shared memory
+ CU_DEVICE_ATTRIBUTE_INTEGRATED = 18
+
+} CUdevice_attribute;
+
+typedef void *nvcudaDevice_t; // Opaque is sufficient
+typedef struct nvcudaMemory_st {
+ uint64_t total;
+ uint64_t free;
+} nvcudaMemory_t;
+
+typedef struct nvcudaDriverVersion {
+ int major;
+ int minor;
+} nvcudaDriverVersion_t;
+
+typedef struct CUuuid_st {
+ unsigned char bytes[16];
+} CUuuid;
+
+typedef int CUdevice;
+typedef void* CUcontext;
+
+typedef struct nvcuda_handle {
+ void *handle;
+ uint16_t verbose;
+ int driver_major;
+ int driver_minor;
+ CUresult (*cuInit)(unsigned int Flags);
+ CUresult (*cuDriverGetVersion)(int *driverVersion);
+ CUresult (*cuDeviceGetCount)(int *);
+ CUresult (*cuDeviceGet)(CUdevice* device, int ordinal);
+ CUresult (*cuDeviceGetAttribute)(int* pi, CUdevice_attribute attrib, CUdevice dev);
+ CUresult (*cuDeviceGetUuid)(CUuuid* uuid, CUdevice dev); // signature compatible with cuDeviceGetUuid_v2
+ CUresult (*cuDeviceGetName)(char *name, int len, CUdevice dev);
+
+ // Context specific aspects
+ CUresult (*cuCtxCreate_v3)(CUcontext* pctx, void *params, int len, unsigned int flags, CUdevice dev);
+ CUresult (*cuMemGetInfo_v2)(uint64_t* free, uint64_t* total);
+ CUresult (*cuCtxDestroy)(CUcontext ctx);
+} nvcuda_handle_t;
+
+typedef struct nvcuda_init_resp {
+ char *err; // If err is non-null handle is invalid
+ nvcuda_handle_t ch;
+ int num_devices;
+ CUresult cudaErr;
+} nvcuda_init_resp_t;
+
+void nvcuda_init(char *nvcuda_lib_path, nvcuda_init_resp_t *resp);
+void nvcuda_bootstrap(nvcuda_handle_t ch, int device_id, mem_info_t *resp);
+void nvcuda_get_free(nvcuda_handle_t ch, int device_id, uint64_t *free, uint64_t *total);
+void nvcuda_release(nvcuda_handle_t ch);
+
+#endif // __GPU_INFO_NVCUDA_H__
+#endif // __APPLE__
diff --git a/ollama/gpu/gpu_info_nvml.c b/ollama/gpu/gpu_info_nvml.c
new file mode 100755
index 0000000000000000000000000000000000000000..11293e4482a750b5ffcce9da6d479faec7aca893
--- /dev/null
+++ b/ollama/gpu/gpu_info_nvml.c
@@ -0,0 +1,104 @@
+#ifndef __APPLE__ // TODO - maybe consider nvidia support on intel macs?
+
+#include <string.h>
+
+#include "gpu_info_nvml.h"
+
+void nvml_init(char *nvml_lib_path, nvml_init_resp_t *resp) {
+ nvmlReturn_t ret;
+ resp->err = NULL;
+ const int buflen = 256;
+ char buf[buflen + 1];
+ int i;
+
+ struct lookup {
+ char *s;
+ void **p;
+ } l[] = {
+ {"nvmlInit_v2", (void *)&resp->ch.nvmlInit_v2},
+ {"nvmlShutdown", (void *)&resp->ch.nvmlShutdown},
+ {"nvmlDeviceGetHandleByIndex", (void *)&resp->ch.nvmlDeviceGetHandleByIndex},
+ {"nvmlDeviceGetMemoryInfo", (void *)&resp->ch.nvmlDeviceGetMemoryInfo},
+ {NULL, NULL},
+ };
+
+ resp->ch.handle = LOAD_LIBRARY(nvml_lib_path, RTLD_LAZY);
+ if (!resp->ch.handle) {
+ char *msg = LOAD_ERR();
+ LOG(resp->ch.verbose, "library %s load err: %s\n", nvml_lib_path, msg);
+ snprintf(buf, buflen,
+ "Unable to load %s library to query for Nvidia GPUs: %s",
+ nvml_lib_path, msg);
+ free(msg);
+ resp->err = strdup(buf);
+ return;
+ }
+
+ // TODO once we've squashed the remaining corner cases remove this log
+ // LOG(resp->ch.verbose, "wiring nvidia management library functions in %s\n", nvml_lib_path);
+
+ for (i = 0; l[i].s != NULL; i++) {
+ // TODO once we've squashed the remaining corner cases remove this log
+ // LOG(resp->ch.verbose, "dlsym: %s\n", l[i].s);
+
+ *l[i].p = LOAD_SYMBOL(resp->ch.handle, l[i].s);
+ if (!*(l[i].p)) {
+ char *msg = LOAD_ERR();
+ LOG(resp->ch.verbose, "dlerr: %s\n", msg);
+ UNLOAD_LIBRARY(resp->ch.handle);
+ resp->ch.handle = NULL;
+ snprintf(buf, buflen, "symbol lookup for %s failed: %s", l[i].s,
+ msg);
+ free(msg);
+ resp->err = strdup(buf);
+ return;
+ }
+ }
+
+ ret = (*resp->ch.nvmlInit_v2)();
+ if (ret != NVML_SUCCESS) {
+ LOG(resp->ch.verbose, "nvmlInit_v2 err: %d\n", ret);
+ UNLOAD_LIBRARY(resp->ch.handle);
+ resp->ch.handle = NULL;
+ snprintf(buf, buflen, "nvml vram init failure: %d", ret);
+ resp->err = strdup(buf);
+ return;
+ }
+}
+
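+// nvml_get_free reports free, total and used memory for the given device index via NVML.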
+void nvml_get_free(nvml_handle_t h, int device_id, uint64_t *free, uint64_t *total, uint64_t *used) {
+ nvmlDevice_t device;
+ nvmlMemory_t memInfo = {0};
+ nvmlReturn_t ret;
+ ret = (*h.nvmlDeviceGetHandleByIndex)(device_id, &device);
+ if (ret != NVML_SUCCESS) {
+ LOG(1, "unable to get device handle %d: %d", device_id, ret);
+ *free = 0;
+ return;
+ }
+
+ ret = (*h.nvmlDeviceGetMemoryInfo)(device, &memInfo);
+ if (ret != NVML_SUCCESS) {
+ LOG(1, "device memory info lookup failure %d: %d", device_id, ret);
+ *free = 0;
+ return;
+ }
+ *free = memInfo.free;
+ *total = memInfo.total;
+ *used = memInfo.used;
+}
+
+void nvml_release(nvml_handle_t h) {
+ LOG(h.verbose, "releasing nvml library\n");
+ nvmlReturn_t ret;
+ ret = (*h.nvmlShutdown)();
+ if (ret != NVML_SUCCESS) {
+ LOG(1, "error during nvmlShutdown %d", ret);
+ }
+ UNLOAD_LIBRARY(h.handle);
+ h.handle = NULL;
+}
+
+#endif // __APPLE__
\ No newline at end of file
diff --git a/ollama/gpu/gpu_info_nvml.h b/ollama/gpu/gpu_info_nvml.h
new file mode 100755
index 0000000000000000000000000000000000000000..a661f723b365d493dc07e2e8c5a068f3a474e035
--- /dev/null
+++ b/ollama/gpu/gpu_info_nvml.h
@@ -0,0 +1,48 @@
+#ifndef __APPLE__
+#ifndef __GPU_INFO_NVML_H__
+#define __GPU_INFO_NVML_H__
+#include "gpu_info.h"
+
+// Just enough typedef's to dlopen/dlsym for memory information
+typedef enum nvmlReturn_enum {
+ NVML_SUCCESS = 0,
+ // Other values omitted for now...
+} nvmlReturn_t;
+typedef void *nvmlDevice_t; // Opaque is sufficient
+typedef struct nvmlMemory_st {
+ unsigned long long total;
+ unsigned long long free;
+ unsigned long long used;
+} nvmlMemory_t;
+
+typedef enum nvmlBrandType_enum
+{
+ NVML_BRAND_UNKNOWN = 0,
+} nvmlBrandType_t;
+
+typedef struct nvml_handle {
+ void *handle;
+ uint16_t verbose;
+ nvmlReturn_t (*nvmlInit_v2)(void);
+ nvmlReturn_t (*nvmlShutdown)(void);
+ nvmlReturn_t (*nvmlDeviceGetHandleByIndex)(unsigned int, nvmlDevice_t *);
+ nvmlReturn_t (*nvmlDeviceGetMemoryInfo)(nvmlDevice_t, nvmlMemory_t *);
+} nvml_handle_t;
+
+typedef struct nvml_init_resp {
+ char *err; // If err is non-null handle is invalid
+ nvml_handle_t ch;
+} nvml_init_resp_t;
+
+typedef struct nvml_compute_capability {
+ char *err;
+ int major;
+ int minor;
+} nvml_compute_capability_t;
+
+void nvml_init(char *nvml_lib_path, nvml_init_resp_t *resp);
+void nvml_get_free(nvml_handle_t ch, int device_id, uint64_t *free, uint64_t *total, uint64_t *used);
+void nvml_release(nvml_handle_t ch);
+
+#endif // __GPU_INFO_NVML_H__
+#endif // __APPLE__
\ No newline at end of file
diff --git a/ollama/gpu/gpu_info_oneapi.c b/ollama/gpu/gpu_info_oneapi.c
new file mode 100755
index 0000000000000000000000000000000000000000..3ff708ea20df7669192cf84792dc202c7c021e2f
--- /dev/null
+++ b/ollama/gpu/gpu_info_oneapi.c
@@ -0,0 +1,259 @@
+#ifndef __APPLE__
+
+#include "gpu_info_oneapi.h"
+
+#include <string.h>
+
+void oneapi_init(char *oneapi_lib_path, oneapi_init_resp_t *resp) {
+ ze_result_t ret;
+ resp->err = NULL;
+ resp->oh.devices = NULL;
+ resp->oh.num_devices = NULL;
+ resp->oh.drivers = NULL;
+ resp->oh.num_drivers = 0;
+ const int buflen = 256;
+ char buf[buflen + 1];
+ int i, d;
+ struct lookup {
+ char *s;
+ void **p;
+ } l[] = {
+ {"zesInit", (void *)&resp->oh.zesInit},
+ {"zesDriverGet", (void *)&resp->oh.zesDriverGet},
+ {"zesDeviceGet", (void *)&resp->oh.zesDeviceGet},
+ {"zesDeviceGetProperties", (void *)&resp->oh.zesDeviceGetProperties},
+ {"zesDeviceEnumMemoryModules",
+ (void *)&resp->oh.zesDeviceEnumMemoryModules},
+ {"zesMemoryGetProperties", (void *)&resp->oh.zesMemoryGetProperties},
+ {"zesMemoryGetState", (void *)&resp->oh.zesMemoryGetState},
+ {NULL, NULL},
+ };
+
+ resp->oh.handle = LOAD_LIBRARY(oneapi_lib_path, RTLD_LAZY);
+ if (!resp->oh.handle) {
+ char *msg = LOAD_ERR();
+ snprintf(buf, buflen,
+ "Unable to load %s library to query for Intel GPUs: %s\n",
+ oneapi_lib_path, msg);
+ free(msg);
+ resp->err = strdup(buf);
+ return;
+ }
+
+ // TODO once we've squashed the remaining corner cases remove this log
+ LOG(resp->oh.verbose,
+ "wiring Level-Zero management library functions in %s\n",
+ oneapi_lib_path);
+
+ for (i = 0; l[i].s != NULL; i++) {
+ // TODO once we've squashed the remaining corner cases remove this log
+ LOG(resp->oh.verbose, "dlsym: %s\n", l[i].s);
+
+ *l[i].p = LOAD_SYMBOL(resp->oh.handle, l[i].s);
+ if (!*(l[i].p)) {
+ char *msg = LOAD_ERR();
+ LOG(resp->oh.verbose, "dlerr: %s\n", msg);
+ UNLOAD_LIBRARY(resp->oh.handle);
+ resp->oh.handle = NULL;
+ snprintf(buf, buflen, "symbol lookup for %s failed: %s", l[i].s, msg);
+ free(msg);
+ resp->err = strdup(buf);
+ return;
+ }
+ }
+
+ LOG(resp->oh.verbose, "calling zesInit\n");
+
+ ret = (*resp->oh.zesInit)(0);
+ if (ret != ZE_RESULT_SUCCESS) {
+ LOG(resp->oh.verbose, "zesInit err: %x\n", ret);
+ snprintf(buf, buflen, "oneapi vram init failure: %x", ret);
+ resp->err = strdup(buf);
+ oneapi_release(resp->oh);
+ return;
+ }
+
+ LOG(resp->oh.verbose, "calling zesDriverGet\n");
+ ret = (*resp->oh.zesDriverGet)(&resp->oh.num_drivers, NULL);
+ if (ret != ZE_RESULT_SUCCESS) {
+ LOG(resp->oh.verbose, "zesDriverGet err: %x\n", ret);
+ snprintf(buf, buflen, "unable to get driver count: %x", ret);
+ resp->err = strdup(buf);
+ oneapi_release(resp->oh);
+ return;
+ }
+ LOG(resp->oh.verbose, "oneapi driver count: %d\n", resp->oh.num_drivers);
+ resp->oh.drivers = malloc(resp->oh.num_drivers * sizeof(zes_driver_handle_t));
+ resp->oh.num_devices = malloc(resp->oh.num_drivers * sizeof(uint32_t));
+ memset(&resp->oh.num_devices[0], 0, resp->oh.num_drivers * sizeof(uint32_t));
+ resp->oh.devices =
+ malloc(resp->oh.num_drivers * sizeof(zes_device_handle_t *));
+ ret = (*resp->oh.zesDriverGet)(&resp->oh.num_drivers, &resp->oh.drivers[0]);
+ if (ret != ZE_RESULT_SUCCESS) {
+ LOG(resp->oh.verbose, "zesDriverGet err: %x\n", ret);
+ snprintf(buf, buflen, "unable to get driver count: %x", ret);
+ resp->err = strdup(buf);
+ oneapi_release(resp->oh);
+ return;
+ }
+
+ for (d = 0; d < resp->oh.num_drivers; d++) {
+ LOG(resp->oh.verbose, "calling zesDeviceGet count %d: %p\n", d, resp->oh.drivers[d]);
+ ret = (*resp->oh.zesDeviceGet)(resp->oh.drivers[d],
+ &resp->oh.num_devices[d], NULL);
+ if (ret != ZE_RESULT_SUCCESS) {
+ LOG(resp->oh.verbose, "zesDeviceGet err: %x\n", ret);
+ snprintf(buf, buflen, "unable to get device count: %x", ret);
+ resp->err = strdup(buf);
+ oneapi_release(resp->oh);
+ return;
+ }
+ resp->oh.devices[d] =
+ malloc(resp->oh.num_devices[d] * sizeof(zes_device_handle_t));
+ ret = (*resp->oh.zesDeviceGet)(
+ resp->oh.drivers[d], &resp->oh.num_devices[d], resp->oh.devices[d]);
+ if (ret != ZE_RESULT_SUCCESS) {
+ LOG(resp->oh.verbose, "zesDeviceGet err: %x\n", ret);
+ snprintf(buf, buflen, "unable to get device count: %x", ret);
+ resp->err = strdup(buf);
+ oneapi_release(resp->oh);
+ return;
+ }
+ }
+
+ return;
+}
+
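+// oneapi_check_vram fills resp with device identity and VRAM totals by summing the
+// size and free bytes across all memory modules of the given driver/device pair.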
+void oneapi_check_vram(oneapi_handle_t h, int driver, int device,
+ mem_info_t *resp) {
+ ze_result_t ret;
+ resp->err = NULL;
+ uint64_t totalMem = 0;
+ uint64_t usedMem = 0;
+ const int buflen = 256;
+ char buf[buflen + 1];
+ int i, d, m;
+
+ if (h.handle == NULL) {
+ resp->err = strdup("Level-Zero handle not initialized");
+ return;
+ }
+
+ if (driver >= h.num_drivers || device >= h.num_devices[driver]) {
+ resp->err = strdup("driver or device index out of bounds");
+ return;
+ }
+
+ resp->total = 0;
+ resp->free = 0;
+
+ zes_device_ext_properties_t ext_props;
+ ext_props.stype = ZES_STRUCTURE_TYPE_DEVICE_EXT_PROPERTIES;
+ ext_props.pNext = NULL;
+
+ zes_device_properties_t props;
+ props.stype = ZES_STRUCTURE_TYPE_DEVICE_PROPERTIES;
+ props.pNext = &ext_props;
+
+ ret = (*h.zesDeviceGetProperties)(h.devices[driver][device], &props);
+ if (ret != ZE_RESULT_SUCCESS) {
+ snprintf(buf, buflen, "unable to get device properties: %d", ret);
+ resp->err = strdup(buf);
+ return;
+ }
+
+ snprintf(&resp->gpu_name[0], GPU_NAME_LEN, "%s", props.modelName);
+
+ // TODO this needs to map to ONEAPI_DEVICE_SELECTOR syntax
+ // (this is probably wrong...)
+ // TODO - the driver isn't included - what if there are multiple drivers?
+ snprintf(&resp->gpu_id[0], GPU_ID_LEN, "%d", device);
+
+ if (h.verbose) {
+ // When in verbose mode, report more information about
+ // the card we discover.
+ LOG(h.verbose, "[%d:%d] oneAPI device name: %s\n", driver, device,
+ props.modelName);
+ LOG(h.verbose, "[%d:%d] oneAPI brand: %s\n", driver, device,
+ props.brandName);
+ LOG(h.verbose, "[%d:%d] oneAPI vendor: %s\n", driver, device,
+ props.vendorName);
+ LOG(h.verbose, "[%d:%d] oneAPI S/N: %s\n", driver, device,
+ props.serialNumber);
+ LOG(h.verbose, "[%d:%d] oneAPI board number: %s\n", driver, device,
+ props.boardNumber);
+ }
+
+ // TODO
+ // Compute Capability equivalent in resp->major, resp->minor, resp->patch
+
+ uint32_t memCount = 0;
+ ret = (*h.zesDeviceEnumMemoryModules)(h.devices[driver][device], &memCount,
+ NULL);
+ if (ret != ZE_RESULT_SUCCESS) {
+ snprintf(buf, buflen, "unable to enumerate Level-Zero memory modules: %x",
+ ret);
+ resp->err = strdup(buf);
+ return;
+ }
+
+ LOG(h.verbose, "discovered %d Level-Zero memory modules\n", memCount);
+
+ zes_mem_handle_t *mems = malloc(memCount * sizeof(zes_mem_handle_t));
+ (*h.zesDeviceEnumMemoryModules)(h.devices[driver][device], &memCount, mems);
+
+ for (m = 0; m < memCount; m++) {
+ zes_mem_state_t state;
+ state.stype = ZES_STRUCTURE_TYPE_MEM_STATE;
+ state.pNext = NULL;
+ ret = (*h.zesMemoryGetState)(mems[m], &state);
+ if (ret != ZE_RESULT_SUCCESS) {
+ snprintf(buf, buflen, "unable to get memory state: %x", ret);
+ resp->err = strdup(buf);
+ free(mems);
+ return;
+ }
+
+ resp->total += state.size;
+ resp->free += state.free;
+ }
+
+ free(mems);
+}
+
+void oneapi_release(oneapi_handle_t h) {
+ int d;
+ LOG(h.verbose, "releasing oneapi library\n");
+ for (d = 0; d < h.num_drivers; d++) {
+ if (h.devices != NULL && h.devices[d] != NULL) {
+ free(h.devices[d]);
+ }
+ }
+ if (h.devices != NULL) {
+ free(h.devices);
+ h.devices = NULL;
+ }
+ if (h.num_devices != NULL) {
+ free(h.num_devices);
+ h.num_devices = NULL;
+ }
+ if (h.drivers != NULL) {
+ free(h.drivers);
+ h.drivers = NULL;
+ }
+ h.num_drivers = 0;
+ UNLOAD_LIBRARY(h.handle);
+ h.handle = NULL;
+}
+
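+// oneapi_get_device_count returns the number of devices discovered for the given
+// driver index, or 0 if the library isn't loaded or the index is out of range.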
+int oneapi_get_device_count(oneapi_handle_t h, int driver) {
+ if (h.handle == NULL || h.num_devices == NULL) {
+ return 0;
+ }
+ if (driver >= h.num_drivers) {
+ return 0;
+ }
+ return (int)h.num_devices[driver];
+}
+
+#endif // __APPLE__
diff --git a/ollama/gpu/gpu_info_oneapi.h b/ollama/gpu/gpu_info_oneapi.h
new file mode 100755
index 0000000000000000000000000000000000000000..97fcecd9c7c562854344dd3c8a316ebd035ac1bd
--- /dev/null
+++ b/ollama/gpu/gpu_info_oneapi.h
@@ -0,0 +1,203 @@
+#ifndef __APPLE__
+#ifndef __GPU_INFO_ONEAPI_H__
+#define __GPU_INFO_ONEAPI_H__
+#include "gpu_info.h"
+
+#define ZE_MAX_DEVICE_NAME 256
+#define ZE_MAX_DEVICE_UUID_SIZE 16
+#define ZES_STRING_PROPERTY_SIZE 64
+#define ZE_BIT(_i) (1 << _i)
+
+// Just enough typedef's to dlopen/dlsym for memory information
+typedef enum ze_result_t {
+ ZE_RESULT_SUCCESS = 0,
+ // Other values omitted for now...
+} ze_result_t;
+
+typedef uint8_t ze_bool_t;
+typedef struct _zes_driver_handle_t *zes_driver_handle_t;
+typedef struct _zes_device_handle_t *zes_device_handle_t;
+typedef struct _zes_mem_handle_t *zes_mem_handle_t;
+
+typedef enum _ze_structure_type_t {
+ ZE_STRUCTURE_TYPE_FORCE_UINT32 = 0x7fffffff
+} ze_structure_type_t;
+
+typedef enum _zes_structure_type_t {
+ ZES_STRUCTURE_TYPE_DEVICE_PROPERTIES = 0x1,
+ ZES_STRUCTURE_TYPE_MEM_PROPERTIES = 0xb,
+ ZES_STRUCTURE_TYPE_MEM_STATE = 0x1e,
+ ZES_STRUCTURE_TYPE_DEVICE_EXT_PROPERTIES = 0x2d,
+ ZES_STRUCTURE_TYPE_FORCE_UINT32 = 0x7fffffff
+} zes_structure_type_t;
+
+typedef enum _zes_mem_type_t {
+ ZES_MEM_TYPE_FORCE_UINT32 = 0x7fffffff
+} zes_mem_type_t;
+
+typedef enum _zes_mem_loc_t {
+ ZES_MEM_LOC_SYSTEM = 0,
+ ZES_MEM_LOC_DEVICE = 1,
+ ZES_MEM_LOC_FORCE_UINT32 = 0x7fffffff
+} zes_mem_loc_t;
+
+typedef enum _zes_mem_health_t {
+ ZES_MEM_HEALTH_FORCE_UINT32 = 0x7fffffff
+} zes_mem_health_t;
+
+typedef struct _ze_device_uuid_t {
+ uint8_t id[ZE_MAX_DEVICE_UUID_SIZE];
+} ze_device_uuid_t;
+
+typedef struct _zes_uuid_t {
+ uint8_t id[ZE_MAX_DEVICE_UUID_SIZE];
+} zes_uuid_t;
+
+typedef enum _ze_device_type_t {
+ ZE_DEVICE_TYPE_GPU = 1,
+ ZE_DEVICE_TYPE_CPU = 2,
+ ZE_DEVICE_TYPE_FPGA = 3,
+ ZE_DEVICE_TYPE_MCA = 4,
+ ZE_DEVICE_TYPE_VPU = 5,
+ ZE_DEVICE_TYPE_FORCE_UINT32 = 0x7fffffff
+} ze_device_type_t;
+
+typedef enum _zes_device_type_t {
+ ZES_DEVICE_TYPE_GPU = 1,
+ ZES_DEVICE_TYPE_CPU = 2,
+ ZES_DEVICE_TYPE_FPGA = 3,
+ ZES_DEVICE_TYPE_MCA = 4,
+ ZES_DEVICE_TYPE_VPU = 5,
+ ZES_DEVICE_TYPE_FORCE_UINT32 = 0x7fffffff
+} zes_device_type_t;
+
+typedef uint32_t ze_device_property_flags_t;
+typedef enum _ze_device_property_flag_t {
+ ZE_DEVICE_PROPERTY_FLAG_INTEGRATED = ZE_BIT(0),
+ ZE_DEVICE_PROPERTY_FLAG_SUBDEVICE = ZE_BIT(1),
+ ZE_DEVICE_PROPERTY_FLAG_ECC = ZE_BIT(2),
+ ZE_DEVICE_PROPERTY_FLAG_ONDEMANDPAGING = ZE_BIT(3),
+ ZE_DEVICE_PROPERTY_FLAG_FORCE_UINT32 = 0x7fffffff
+} ze_device_property_flag_t;
+
+typedef uint32_t zes_device_property_flags_t;
+typedef enum _zes_device_property_flag_t {
+ ZES_DEVICE_PROPERTY_FLAG_INTEGRATED = ZE_BIT(0),
+ ZES_DEVICE_PROPERTY_FLAG_SUBDEVICE = ZE_BIT(1),
+ ZES_DEVICE_PROPERTY_FLAG_ECC = ZE_BIT(2),
+ ZES_DEVICE_PROPERTY_FLAG_ONDEMANDPAGING = ZE_BIT(3),
+ ZES_DEVICE_PROPERTY_FLAG_FORCE_UINT32 = 0x7fffffff
+} zes_device_property_flag_t;
+
+typedef struct _ze_device_properties_t {
+ ze_structure_type_t stype;
+ void *pNext;
+ ze_device_type_t type;
+ uint32_t vendorId;
+ uint32_t deviceId;
+ ze_device_property_flags_t flags;
+ uint32_t subdeviceId;
+ uint32_t coreClockRate;
+ uint64_t maxMemAllocSize;
+ uint32_t maxHardwareContexts;
+ uint32_t maxCommandQueuePriority;
+ uint32_t numThreadsPerEU;
+ uint32_t physicalEUSimdWidth;
+ uint32_t numEUsPerSubslice;
+ uint32_t numSubslicesPerSlice;
+ uint32_t numSlices;
+ uint64_t timerResolution;
+ uint32_t timestampValidBits;
+ uint32_t kernelTimestampValidBits;
+ ze_device_uuid_t uuid;
+ char name[ZE_MAX_DEVICE_NAME];
+} ze_device_properties_t;
+
+typedef struct _zes_device_properties_t {
+ zes_structure_type_t stype;
+ void *pNext;
+ ze_device_properties_t core;
+ uint32_t numSubdevices;
+ char serialNumber[ZES_STRING_PROPERTY_SIZE];
+ char boardNumber[ZES_STRING_PROPERTY_SIZE];
+ char brandName[ZES_STRING_PROPERTY_SIZE];
+ char modelName[ZES_STRING_PROPERTY_SIZE];
+ char vendorName[ZES_STRING_PROPERTY_SIZE];
+ char driverVersion[ZES_STRING_PROPERTY_SIZE];
+} zes_device_properties_t;
+
+typedef struct _zes_device_ext_properties_t {
+ zes_structure_type_t stype;
+ void *pNext;
+ zes_uuid_t uuid;
+ zes_device_type_t type;
+ zes_device_property_flags_t flags;
+} zes_device_ext_properties_t;
+
+typedef struct _zes_mem_properties_t {
+ zes_structure_type_t stype;
+ void *pNext;
+ zes_mem_type_t type;
+ ze_bool_t onSubdevice;
+ uint32_t subdeviceId;
+ zes_mem_loc_t location;
+ uint64_t physicalSize;
+ int32_t busWidth;
+ int32_t numChannels;
+} zes_mem_properties_t;
+
+typedef struct _zes_mem_state_t {
+ zes_structure_type_t stype;
+ const void *pNext;
+ zes_mem_health_t health;
+ uint64_t free;
+ uint64_t size;
+} zes_mem_state_t;
+
+typedef struct oneapi_handle {
+ void *handle;
+ uint16_t verbose;
+
+ uint32_t num_drivers;
+ zes_driver_handle_t *drivers;
+ uint32_t *num_devices;
+ zes_device_handle_t **devices;
+
+ // TODO Driver major, minor information
+ // int driver_major;
+ // int driver_minor;
+
+ ze_result_t (*zesInit)(int);
+ ze_result_t (*zesDriverGet)(uint32_t *pCount, zes_driver_handle_t *phDrivers);
+ ze_result_t (*zesDeviceGet)(zes_driver_handle_t hDriver, uint32_t *pCount,
+ zes_device_handle_t *phDevices);
+ ze_result_t (*zesDeviceGetProperties)(zes_device_handle_t hDevice,
+ zes_device_properties_t *pProperties);
+ ze_result_t (*zesDeviceEnumMemoryModules)(zes_device_handle_t hDevice,
+ uint32_t *pCount,
+ zes_mem_handle_t *phMemory);
+ ze_result_t (*zesMemoryGetProperties)(zes_mem_handle_t hMemory,
+ zes_mem_properties_t *pProperties);
+ ze_result_t (*zesMemoryGetState)(zes_mem_handle_t hMemory,
+ zes_mem_state_t *pState);
+
+} oneapi_handle_t;
+
+typedef struct oneapi_init_resp {
+ char *err; // If err is non-null handle is invalid
+ oneapi_handle_t oh;
+} oneapi_init_resp_t;
+
+typedef struct oneapi_version_resp {
+ ze_result_t status;
+ char *str; // Contains version or error string if status != 0
+} oneapi_version_resp_t;
+
+void oneapi_init(char *oneapi_lib_path, oneapi_init_resp_t *resp);
+void oneapi_check_vram(oneapi_handle_t h, int driver, int device,
+ mem_info_t *resp);
+void oneapi_release(oneapi_handle_t h);
+int oneapi_get_device_count(oneapi_handle_t h, int driver);
+
+#endif // __GPU_INFO_ONEAPI_H__
+#endif // __APPLE__
diff --git a/ollama/gpu/gpu_linux.go b/ollama/gpu/gpu_linux.go
new file mode 100755
index 0000000000000000000000000000000000000000..d6d2675c889f41b4578a77d8d927887b7ee58b87
--- /dev/null
+++ b/ollama/gpu/gpu_linux.go
@@ -0,0 +1,92 @@
+package gpu
+
+import (
+ "bufio"
+ "fmt"
+ "os"
+ "strings"
+
+ "github.com/ollama/ollama/format"
+)
+
+var CudartGlobs = []string{
+ "/usr/local/cuda/lib64/libcudart.so*",
+ "/usr/lib/x86_64-linux-gnu/nvidia/current/libcudart.so*",
+ "/usr/lib/x86_64-linux-gnu/libcudart.so*",
+ "/usr/lib/wsl/lib/libcudart.so*",
+ "/usr/lib/wsl/drivers/*/libcudart.so*",
+ "/opt/cuda/lib64/libcudart.so*",
+ "/usr/local/cuda*/targets/aarch64-linux/lib/libcudart.so*",
+ "/usr/lib/aarch64-linux-gnu/nvidia/current/libcudart.so*",
+ "/usr/lib/aarch64-linux-gnu/libcudart.so*",
+ "/usr/local/cuda/lib*/libcudart.so*",
+ "/usr/lib*/libcudart.so*",
+ "/usr/local/lib*/libcudart.so*",
+}
+
+var NvmlGlobs = []string{}
+
+var NvcudaGlobs = []string{
+ "/usr/local/cuda*/targets/*/lib/libcuda.so*",
+ "/usr/lib/*-linux-gnu/nvidia/current/libcuda.so*",
+ "/usr/lib/*-linux-gnu/libcuda.so*",
+ "/usr/lib/wsl/lib/libcuda.so*",
+ "/usr/lib/wsl/drivers/*/libcuda.so*",
+ "/opt/cuda/lib*/libcuda.so*",
+ "/usr/local/cuda/lib*/libcuda.so*",
+ "/usr/lib*/libcuda.so*",
+ "/usr/local/lib*/libcuda.so*",
+}
+
+var OneapiGlobs = []string{
+ "/usr/lib/x86_64-linux-gnu/libze_intel_gpu.so*",
+ "/usr/lib*/libze_intel_gpu.so*",
+}
+
+var (
+ CudartMgmtName = "libcudart.so*"
+ NvcudaMgmtName = "libcuda.so*"
+ NvmlMgmtName = "" // not currently wired on linux
+ OneapiMgmtName = "libze_intel_gpu.so"
+)
+
+func GetCPUMem() (memInfo, error) {
+ var mem memInfo
+ var total, available, free, buffers, cached, freeSwap uint64
+ f, err := os.Open("/proc/meminfo")
+ if err != nil {
+ return mem, err
+ }
+ defer f.Close()
+ s := bufio.NewScanner(f)
+ for s.Scan() {
+ line := s.Text()
+ switch {
+ case strings.HasPrefix(line, "MemTotal:"):
+ _, err = fmt.Sscanf(line, "MemTotal:%d", &total)
+ case strings.HasPrefix(line, "MemAvailable:"):
+ _, err = fmt.Sscanf(line, "MemAvailable:%d", &available)
+ case strings.HasPrefix(line, "MemFree:"):
+ _, err = fmt.Sscanf(line, "MemFree:%d", &free)
+ case strings.HasPrefix(line, "Buffers:"):
+ _, err = fmt.Sscanf(line, "Buffers:%d", &buffers)
+ case strings.HasPrefix(line, "Cached:"):
+ _, err = fmt.Sscanf(line, "Cached:%d", &cached)
+ case strings.HasPrefix(line, "SwapFree:"):
+ _, err = fmt.Sscanf(line, "SwapFree:%d", &freeSwap)
+ default:
+ continue
+ }
+ if err != nil {
+ return mem, err
+ }
+ }
+ mem.TotalMemory = total * format.KibiByte
+ mem.FreeSwap = freeSwap * format.KibiByte
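+ // Prefer MemAvailable when the kernel reports it (3.14+); otherwise approximate
+ // reclaimable memory with free + buffers + cached.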
+ if available > 0 {
+ mem.FreeMemory = available * format.KibiByte
+ } else {
+ mem.FreeMemory = (free + buffers + cached) * format.KibiByte
+ }
+ return mem, nil
+}
diff --git a/ollama/gpu/gpu_oneapi.go b/ollama/gpu/gpu_oneapi.go
new file mode 100755
index 0000000000000000000000000000000000000000..9864bde5137ef5bca3ea5be5a3ddf3a036238de4
--- /dev/null
+++ b/ollama/gpu/gpu_oneapi.go
@@ -0,0 +1,21 @@
+//go:build linux || windows
+
+package gpu
+
+import (
+ "log/slog"
+ "strings"
+)
+
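+// oneapiGetVisibleDevicesEnv returns the ONEAPI_DEVICE_SELECTOR environment variable
+// name and a "level_zero:<ids>" value restricting the runner to the discovered oneAPI GPUs.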
+func oneapiGetVisibleDevicesEnv(gpuInfo []GpuInfo) (string, string) {
+ ids := []string{}
+ for _, info := range gpuInfo {
+ if info.Library != "oneapi" {
+ // TODO shouldn't happen if things are wired correctly...
+ slog.Debug("oneapiGetVisibleDevicesEnv skipping over non-sycl device", "library", info.Library)
+ continue
+ }
+ ids = append(ids, info.ID)
+ }
+ return "ONEAPI_DEVICE_SELECTOR", "level_zero:" + strings.Join(ids, ",")
+}
diff --git a/ollama/gpu/gpu_test.go b/ollama/gpu/gpu_test.go
new file mode 100755
index 0000000000000000000000000000000000000000..46d3201e1c8229b9d4473b165db4e7cd1d9dab47
--- /dev/null
+++ b/ollama/gpu/gpu_test.go
@@ -0,0 +1,35 @@
+package gpu
+
+import (
+ "runtime"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestBasicGetGPUInfo(t *testing.T) {
+ info := GetGPUInfo()
+ assert.NotEmpty(t, len(info))
+ assert.Contains(t, "cuda rocm cpu metal", info[0].Library)
+ if info[0].Library != "cpu" {
+ assert.Greater(t, info[0].TotalMemory, uint64(0))
+ assert.Greater(t, info[0].FreeMemory, uint64(0))
+ }
+}
+
+func TestCPUMemInfo(t *testing.T) {
+ info, err := GetCPUMem()
+ require.NoError(t, err)
+ switch runtime.GOOS {
+ case "darwin":
+ t.Skip("CPU memory not populated on darwin")
+ case "linux", "windows":
+ assert.Greater(t, info.TotalMemory, uint64(0))
+ assert.Greater(t, info.FreeMemory, uint64(0))
+ default:
+ return
+ }
+}
+
+// TODO - add some logic to figure out card type through other means and actually verify we got back what we expected
diff --git a/ollama/gpu/gpu_windows.go b/ollama/gpu/gpu_windows.go
new file mode 100755
index 0000000000000000000000000000000000000000..2ec72ba7b14f45073f97b1de941ee203302a90ce
--- /dev/null
+++ b/ollama/gpu/gpu_windows.go
@@ -0,0 +1,57 @@
+package gpu
+
+import (
+ "fmt"
+ "syscall"
+ "unsafe"
+)
+
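+// MEMORYSTATUSEX mirrors the Win32 MEMORYSTATUSEX structure consumed by
+// GlobalMemoryStatusEx; field order and sizes must match the native layout.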
+type MEMORYSTATUSEX struct {
+ length uint32
+ MemoryLoad uint32
+ TotalPhys uint64
+ AvailPhys uint64
+ TotalPageFile uint64
+ AvailPageFile uint64
+ TotalVirtual uint64
+ AvailVirtual uint64
+ AvailExtendedVirtual uint64
+}
+
+var (
+ k32 = syscall.NewLazyDLL("kernel32.dll")
+ globalMemoryStatusExProc = k32.NewProc("GlobalMemoryStatusEx")
+ sizeofMemoryStatusEx = uint32(unsafe.Sizeof(MEMORYSTATUSEX{}))
+)
+
+var CudartGlobs = []string{
+ "c:\\Program Files\\NVIDIA GPU Computing Toolkit\\CUDA\\v*\\bin\\cudart64_*.dll",
+}
+
+var NvmlGlobs = []string{
+ "c:\\Windows\\System32\\nvml.dll",
+}
+
+var NvcudaGlobs = []string{
+ "c:\\windows\\system*\\nvcuda.dll",
+}
+
+var OneapiGlobs = []string{
+ "c:\\Windows\\System32\\DriverStore\\FileRepository\\*\\ze_intel_gpu64.dll",
+}
+
+var (
+ CudartMgmtName = "cudart64_*.dll"
+ NvcudaMgmtName = "nvcuda.dll"
+ NvmlMgmtName = "nvml.dll"
+ OneapiMgmtName = "ze_intel_gpu64.dll"
+)
+
+func GetCPUMem() (memInfo, error) {
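+ // GlobalMemoryStatusEx requires the length (dwLength) member to be set to the
+ // structure size before the call.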
+ memStatus := MEMORYSTATUSEX{length: sizeofMemoryStatusEx}
+ r1, _, err := globalMemoryStatusExProc.Call(uintptr(unsafe.Pointer(&memStatus)))
+ if r1 == 0 {
+ return memInfo{}, fmt.Errorf("GlobalMemoryStatusEx failed: %w", err)
+ }
+ return memInfo{TotalMemory: memStatus.TotalPhys, FreeMemory: memStatus.AvailPhys, FreeSwap: memStatus.AvailPageFile}, nil
+}
diff --git a/ollama/gpu/types.go b/ollama/gpu/types.go
new file mode 100755
index 0000000000000000000000000000000000000000..8d22b06bfa55f51717ddab941f16cea49636502e
--- /dev/null
+++ b/ollama/gpu/types.go
@@ -0,0 +1,145 @@
+package gpu
+
+import (
+ "fmt"
+ "log/slog"
+
+ "github.com/ollama/ollama/format"
+)
+
+type memInfo struct {
+ TotalMemory uint64 `json:"total_memory,omitempty"`
+ FreeMemory uint64 `json:"free_memory,omitempty"`
+ FreeSwap uint64 `json:"free_swap,omitempty"`
+}
+
+// Beginning of an `ollama info` command
+type GpuInfo struct {
+ memInfo
+ Library string `json:"library,omitempty"`
+
+ // Optional variant to select (e.g. versions, cpu feature flags)
+ Variant CPUCapability `json:"variant"`
+
+ // MinimumMemory represents the minimum memory required to use the GPU
+ MinimumMemory uint64 `json:"-"`
+
+ // Any extra PATH/LD_LIBRARY_PATH dependencies required for the Library to operate properly
+ DependencyPath string `json:"lib_path,omitempty"`
+
+ // Extra environment variables specific to the GPU as list of [key,value]
+ EnvWorkarounds [][2]string `json:"envs,omitempty"`
+
+ // Set to true if we can NOT reliably discover FreeMemory. A value of true indicates
+ // the FreeMemory is best effort, and may over- or under-report actual memory usage.
+ // False indicates FreeMemory can generally be trusted on this GPU.
+ UnreliableFreeMemory bool
+
+ // GPU information
+ ID string `json:"gpu_id"` // string to use for selection of this specific GPU
+ Name string `json:"name"` // user friendly name if available
+ Compute string `json:"compute"` // Compute Capability or gfx
+
+ // Driver Information - TODO no need to put this on each GPU
+ DriverMajor int `json:"driver_major,omitempty"`
+ DriverMinor int `json:"driver_minor,omitempty"`
+
+ // TODO other performance capability info to help in scheduling decisions
+}
+
+type CPUInfo struct {
+ GpuInfo
+}
+
+type CudaGPUInfo struct {
+ GpuInfo
+ OSOverhead uint64 // Memory overhead between the driver library and management library
+ index int //nolint:unused,nolintlint
+}
+type CudaGPUInfoList []CudaGPUInfo
+
+type RocmGPUInfo struct {
+ GpuInfo
+ usedFilepath string //nolint:unused,nolintlint
+ index int //nolint:unused,nolintlint
+}
+type RocmGPUInfoList []RocmGPUInfo
+
+type OneapiGPUInfo struct {
+ GpuInfo
+ driverIndex int //nolint:unused,nolintlint
+ gpuIndex int //nolint:unused,nolintlint
+}
+type OneapiGPUInfoList []OneapiGPUInfo
+
+type GpuInfoList []GpuInfo
+
+// Split up the set of GPU infos by Library and variant
+func (l GpuInfoList) ByLibrary() []GpuInfoList {
+ resp := []GpuInfoList{}
+ libs := []string{}
+ for _, info := range l {
+ found := false
+ requested := info.Library
+ if info.Variant != CPUCapabilityNone {
+ requested += "_" + info.Variant.String()
+ }
+ for i, lib := range libs {
+ if lib == requested {
+ resp[i] = append(resp[i], info)
+ found = true
+ break
+ }
+ }
+ if !found {
+ libs = append(libs, requested)
+ resp = append(resp, []GpuInfo{info})
+ }
+ }
+ return resp
+}
+
+// Report the GPU information into the log at Info level
+func (l GpuInfoList) LogDetails() {
+ for _, g := range l {
+ slog.Info("inference compute",
+ "id", g.ID,
+ "library", g.Library,
+ "compute", g.Compute,
+ "driver", fmt.Sprintf("%d.%d", g.DriverMajor, g.DriverMinor),
+ "name", g.Name,
+ "total", format.HumanBytes2(g.TotalMemory),
+ "available", format.HumanBytes2(g.FreeMemory),
+ )
+ }
+}
+
+// Sort by Free Space
+type ByFreeMemory []GpuInfo
+
+func (a ByFreeMemory) Len() int { return len(a) }
+func (a ByFreeMemory) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+func (a ByFreeMemory) Less(i, j int) bool { return a[i].FreeMemory < a[j].FreeMemory }
+
+type CPUCapability uint32
+
+// Override at build time when building base GPU runners
+var GPURunnerCPUCapability = CPUCapabilityAVX
+
+const (
+ CPUCapabilityNone CPUCapability = iota
+ CPUCapabilityAVX
+ CPUCapabilityAVX2
+ // TODO AVX512
+)
+
+func (c CPUCapability) String() string {
+ switch c {
+ case CPUCapabilityAVX:
+ return "avx"
+ case CPUCapabilityAVX2:
+ return "avx2"
+ default:
+ return "no vector extensions"
+ }
+}
diff --git a/ollama/integration/README.md b/ollama/integration/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..e2bdd6b21d75b88fe1e080e40f7f519fdf65adcd
--- /dev/null
+++ b/ollama/integration/README.md
@@ -0,0 +1,11 @@
+# Integration Tests
+
+This directory contains integration tests that exercise Ollama end-to-end to verify its behavior.
+
+By default, these tests are disabled so that `go test ./...` exercises only unit tests. To run the integration tests you must pass the integration tag: `go test -tags=integration ./...`
+
+The integration tests have two modes of operation:
+
+1. By default, they will start the server on a random port, run the tests, and then shut down the server.
+2. If `OLLAMA_TEST_EXISTING` is set to a non-empty string, the tests will run against an existing running server, which may be remote.
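+
+For example, to run the suite against a server that is already running (possibly on another machine), point the client at it; this assumes the test harness honors the standard `OLLAMA_HOST` environment variable:
+
+`OLLAMA_TEST_EXISTING=1 OLLAMA_HOST=127.0.0.1:11434 go test -tags=integration ./...`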
diff --git a/ollama/integration/basic_test.go b/ollama/integration/basic_test.go
new file mode 100755
index 0000000000000000000000000000000000000000..8e35b5c5b0eff0804803fee854e3507709788876
--- /dev/null
+++ b/ollama/integration/basic_test.go
@@ -0,0 +1,63 @@
+//go:build integration
+
+package integration
+
+import (
+ "context"
+ "log/slog"
+ "os"
+ "runtime"
+ "testing"
+ "time"
+
+ "github.com/ollama/ollama/api"
+ "github.com/stretchr/testify/require"
+)
+
+func TestOrcaMiniBlueSky(t *testing.T) {
+ ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
+ defer cancel()
+ // Set up the test data
+ req := api.GenerateRequest{
+ Model: "orca-mini",
+ Prompt: "why is the sky blue?",
+ Stream: &stream,
+ Options: map[string]interface{}{
+ "temperature": 0,
+ "seed": 123,
+ },
+ }
+ GenerateTestHelper(ctx, t, req, []string{"rayleigh", "scattering"})
+}
+
+func TestUnicodeModelDir(t *testing.T) {
+ // This is only useful for Windows with utf-16 characters, so skip this test for other platforms
+ if runtime.GOOS != "windows" {
+ t.Skip("Unicode test only applicable to windows")
+ }
+ // Only works for local testing
+ if os.Getenv("OLLAMA_TEST_EXISTING") != "" {
+ t.Skip("TestUnicodeModelDir only works for local testing, skipping")
+ }
+
+ modelDir, err := os.MkdirTemp("", "ollama_埃")
+ require.NoError(t, err)
+ defer os.RemoveAll(modelDir)
+ slog.Info("unicode", "OLLAMA_MODELS", modelDir)
+
+ t.Setenv("OLLAMA_MODELS", modelDir)
+
+ ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
+ defer cancel()
+
+ req := api.GenerateRequest{
+ Model: "orca-mini",
+ Prompt: "why is the sky blue?",
+ Stream: &stream,
+ Options: map[string]interface{}{
+ "temperature": 0,
+ "seed": 123,
+ },
+ }
+ GenerateTestHelper(ctx, t, req, []string{"rayleigh", "scattering"})
+}
diff --git a/ollama/integration/concurrency_test.go b/ollama/integration/concurrency_test.go
new file mode 100755
index 0000000000000000000000000000000000000000..42e9d07490e3c139d4e7080d96f0d81554aec3b5
--- /dev/null
+++ b/ollama/integration/concurrency_test.go
@@ -0,0 +1,270 @@
+//go:build integration
+
+package integration
+
+import (
+ "context"
+ "log/slog"
+ "os"
+ "strconv"
+ "sync"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/require"
+
+ "github.com/ollama/ollama/api"
+ "github.com/ollama/ollama/format"
+)
+
+func TestMultiModelConcurrency(t *testing.T) {
+ var (
+ req = [2]api.GenerateRequest{
+ {
+ Model: "orca-mini",
+ Prompt: "why is the ocean blue?",
+ Stream: &stream,
+ KeepAlive: &api.Duration{Duration: 10 * time.Second},
+ Options: map[string]interface{}{
+ "seed": 42,
+ "temperature": 0.0,
+ },
+ }, {
+ Model: "tinydolphin",
+ Prompt: "what is the origin of the us thanksgiving holiday?",
+ Stream: &stream,
+ KeepAlive: &api.Duration{Duration: 10 * time.Second},
+ Options: map[string]interface{}{
+ "seed": 42,
+ "temperature": 0.0,
+ },
+ },
+ }
+ resp = [2][]string{
+ {"sunlight"},
+ {"england", "english", "massachusetts", "pilgrims", "british"},
+ }
+ )
+ var wg sync.WaitGroup
+ wg.Add(len(req))
+ ctx, cancel := context.WithTimeout(context.Background(), time.Second*240)
+ defer cancel()
+
+ client, _, cleanup := InitServerConnection(ctx, t)
+ defer cleanup()
+
+ for i := 0; i < len(req); i++ {
+ require.NoError(t, PullIfMissing(ctx, client, req[i].Model))
+ }
+
+ for i := 0; i < len(req); i++ {
+ go func(i int) {
+ defer wg.Done()
+ DoGenerate(ctx, t, client, req[i], resp[i], 60*time.Second, 10*time.Second)
+ }(i)
+ }
+ wg.Wait()
+}
+
+func TestIntegrationConcurrentPredictOrcaMini(t *testing.T) {
+ req, resp := GenerateRequests()
+ reqLimit := len(req)
+ iterLimit := 5
+
+ if s := os.Getenv("OLLAMA_MAX_VRAM"); s != "" {
+ maxVram, err := strconv.ParseUint(s, 10, 64)
+ require.NoError(t, err)
+ // Don't hammer on small VRAM cards...
+ if maxVram < 4*format.GibiByte {
+ reqLimit = min(reqLimit, 2)
+ iterLimit = 2
+ }
+ }
+
+ ctx, cancel := context.WithTimeout(context.Background(), 9*time.Minute)
+ defer cancel()
+ client, _, cleanup := InitServerConnection(ctx, t)
+ defer cleanup()
+
+ // Get the server running (if applicable) and warm the model up with a single initial request
+ DoGenerate(ctx, t, client, req[0], resp[0], 60*time.Second, 10*time.Second)
+
+ var wg sync.WaitGroup
+ wg.Add(reqLimit)
+ for i := 0; i < reqLimit; i++ {
+ go func(i int) {
+ defer wg.Done()
+ for j := 0; j < iterLimit; j++ {
+ slog.Info("Starting", "req", i, "iter", j)
+ // On slower GPUs it can take a while to process the concurrent requests
+ // so we allow a much longer initial timeout
+ DoGenerate(ctx, t, client, req[i], resp[i], 120*time.Second, 20*time.Second)
+ }
+ }(i)
+ }
+ wg.Wait()
+}
+
+// Stress the system if we know how much VRAM it has, and attempt to load more models than will fit
+func TestMultiModelStress(t *testing.T) {
+ s := os.Getenv("OLLAMA_MAX_VRAM") // TODO - discover actual VRAM
+ if s == "" {
+ t.Skip("OLLAMA_MAX_VRAM not specified, can't pick the right models for the stress test")
+ }
+
+ maxVram, err := strconv.ParseUint(s, 10, 64)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ type model struct {
+ name string
+ size uint64 // Approximate amount of VRAM they typically use when fully loaded in VRAM
+ }
+
+ smallModels := []model{
+ {
+ name: "orca-mini",
+ size: 2992 * format.MebiByte,
+ },
+ {
+ name: "phi",
+ size: 2616 * format.MebiByte,
+ },
+ {
+ name: "gemma:2b",
+ size: 2364 * format.MebiByte,
+ },
+ {
+ name: "stable-code:3b",
+ size: 2608 * format.MebiByte,
+ },
+ {
+ name: "starcoder2:3b",
+ size: 2166 * format.MebiByte,
+ },
+ }
+ mediumModels := []model{
+ {
+ name: "llama2",
+ size: 5118 * format.MebiByte,
+ },
+ {
+ name: "mistral",
+ size: 4620 * format.MebiByte,
+ },
+ {
+ name: "orca-mini:7b",
+ size: 5118 * format.MebiByte,
+ },
+ {
+ name: "dolphin-mistral",
+ size: 4620 * format.MebiByte,
+ },
+ {
+ name: "gemma:7b",
+ size: 5000 * format.MebiByte,
+ },
+ {
+ name: "codellama:7b",
+ size: 5118 * format.MebiByte,
+ },
+ }
+
+ // These seem to be too slow to be useful...
+ // largeModels := []model{
+ // {
+ // name: "llama2:13b",
+ // size: 7400 * format.MebiByte,
+ // },
+ // {
+ // name: "codellama:13b",
+ // size: 7400 * format.MebiByte,
+ // },
+ // {
+ // name: "orca-mini:13b",
+ // size: 7400 * format.MebiByte,
+ // },
+ // {
+ // name: "gemma:7b",
+ // size: 5000 * format.MebiByte,
+ // },
+ // {
+ // name: "starcoder2:15b",
+ // size: 9100 * format.MebiByte,
+ // },
+ // }
+
+ var chosenModels []model
+ switch {
+ case maxVram < 10000*format.MebiByte:
+ slog.Info("selecting small models")
+ chosenModels = smallModels
+ // case maxVram < 30000*format.MebiByte:
+ default:
+ slog.Info("selecting medium models")
+ chosenModels = mediumModels
+ // default:
+ // slog.Info("selecting large models")
+ // chosenModels = largeModels
+ }
+
+ req, resp := GenerateRequests()
+
+ for i := range req {
+ if i >= len(chosenModels) {
+ break
+ }
+ req[i].Model = chosenModels[i].name
+ }
+
+ ctx, cancel := context.WithTimeout(context.Background(), 15*time.Minute) // TODO baseline -- 10m too short
+ defer cancel()
+ client, _, cleanup := InitServerConnection(ctx, t)
+ defer cleanup()
+
+ // Make sure all the models are pulled before we get started
+ for _, r := range req {
+ require.NoError(t, PullIfMissing(ctx, client, r.Model))
+ }
+
+ var wg sync.WaitGroup
+ consumed := uint64(256 * format.MebiByte) // Assume some baseline usage
+ for i := 0; i < len(req); i++ {
+ // Always get at least 2 models, but don't overshoot VRAM too much or we'll take too long
+ if i > 1 && consumed > maxVram {
+ slog.Info("achieved target vram exhaustion", "count", i, "vram", format.HumanBytes2(maxVram), "models", format.HumanBytes2(consumed))
+ break
+ }
+ consumed += chosenModels[i].size
+ slog.Info("target vram", "count", i, "vram", format.HumanBytes2(maxVram), "models", format.HumanBytes2(consumed))
+
+ wg.Add(1)
+ go func(i int) {
+ defer wg.Done()
+ for j := 0; j < 3; j++ {
+ slog.Info("Starting", "req", i, "iter", j, "model", req[i].Model)
+ DoGenerate(ctx, t, client, req[i], resp[i], 120*time.Second, 5*time.Second)
+ }
+ }(i)
+ }
+ go func() {
+ for {
+ time.Sleep(2 * time.Second)
+ select {
+ case <-ctx.Done():
+ return
+ default:
+ models, err := client.ListRunning(ctx)
+ if err != nil {
+ slog.Warn("failed to list running models", "error", err)
+ continue
+ }
+ for _, m := range models.Models {
+ slog.Info("loaded model snapshot", "model", m)
+ }
+ }
+ }
+ }()
+ wg.Wait()
+}
diff --git a/ollama/integration/context_test.go b/ollama/integration/context_test.go
new file mode 100755
index 0000000000000000000000000000000000000000..f1342e16c6c6e3055e2085699326e04ca352a186
--- /dev/null
+++ b/ollama/integration/context_test.go
@@ -0,0 +1,34 @@
+//go:build integration
+
+package integration
+
+import (
+ "context"
+ "testing"
+ "time"
+
+ "github.com/ollama/ollama/api"
+)
+
+func TestContextExhaustion(t *testing.T) {
+ // Longer timeout needed for small-footprint GPUs
+ ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
+ defer cancel()
+ // Set up the test data
+ req := api.GenerateRequest{
+ Model: "llama2",
+ Prompt: "Write me a story with a ton of emojis?",
+ Stream: &stream,
+ Options: map[string]interface{}{
+ "temperature": 0,
+ "seed": 123,
+ "num_ctx": 128,
+ },
+ }
+ client, _, cleanup := InitServerConnection(ctx, t)
+ defer cleanup()
+ if err := PullIfMissing(ctx, client, req.Model); err != nil {
+ t.Fatalf("PullIfMissing failed: %v", err)
+ }
+ DoGenerate(ctx, t, client, req, []string{"once", "upon", "lived"}, 120*time.Second, 10*time.Second)
+}
diff --git a/ollama/integration/embed_test.go b/ollama/integration/embed_test.go
new file mode 100755
index 0000000000000000000000000000000000000000..10333d5dfa9992bc6a2bf05a715f61d8e016171f
--- /dev/null
+++ b/ollama/integration/embed_test.go
@@ -0,0 +1,209 @@
+//go:build integration
+
+package integration
+
+import (
+ "context"
+ "math"
+ "testing"
+ "time"
+
+ "github.com/ollama/ollama/api"
+)
+
+func floatsEqual32(a, b float32) bool {
+ return math.Abs(float64(a-b)) <= 1e-4
+}
+
+func floatsEqual64(a, b float64) bool {
+ return math.Abs(a-b) <= 1e-4
+}
+
+func TestAllMiniLMEmbeddings(t *testing.T) {
+ ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
+ defer cancel()
+
+ req := api.EmbeddingRequest{
+ Model: "all-minilm",
+ Prompt: "why is the sky blue?",
+ }
+
+ res, err := embeddingTestHelper(ctx, t, req)
+
+ if err != nil {
+ t.Fatalf("error: %v", err)
+ }
+
+ if len(res.Embedding) != 384 {
+ t.Fatalf("expected 384 floats, got %d", len(res.Embedding))
+ }
+
+ if !floatsEqual64(res.Embedding[0], 0.06642947345972061) {
+ t.Fatalf("expected 0.06642947345972061, got %.16f", res.Embedding[0])
+ }
+}
+
+func TestAllMiniLMEmbed(t *testing.T) {
+ ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
+ defer cancel()
+
+ req := api.EmbedRequest{
+ Model: "all-minilm",
+ Input: "why is the sky blue?",
+ }
+
+ res, err := embedTestHelper(ctx, t, req)
+
+ if err != nil {
+ t.Fatalf("error: %v", err)
+ }
+
+ if len(res.Embeddings) != 1 {
+ t.Fatalf("expected 1 embedding, got %d", len(res.Embeddings))
+ }
+
+ if len(res.Embeddings[0]) != 384 {
+ t.Fatalf("expected 384 floats, got %d", len(res.Embeddings[0]))
+ }
+
+ if !floatsEqual32(res.Embeddings[0][0], 0.010071031) {
+ t.Fatalf("expected 0.010071031, got %.8f", res.Embeddings[0][0])
+ }
+
+ if res.PromptEvalCount != 8 {
+ t.Fatalf("expected 8 prompt tokens, got %d", res.PromptEvalCount)
+ }
+}
+
+func TestAllMiniLMBatchEmbed(t *testing.T) {
+ ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
+ defer cancel()
+
+ req := api.EmbedRequest{
+ Model: "all-minilm",
+ Input: []string{"why is the sky blue?", "why is the grass green?"},
+ }
+
+ res, err := embedTestHelper(ctx, t, req)
+
+ if err != nil {
+ t.Fatalf("error: %v", err)
+ }
+
+ if len(res.Embeddings) != 2 {
+ t.Fatalf("expected 2 embeddings, got %d", len(res.Embeddings))
+ }
+
+ if len(res.Embeddings[0]) != 384 {
+ t.Fatalf("expected 384 floats, got %d", len(res.Embeddings[0]))
+ }
+
+ if !floatsEqual32(res.Embeddings[0][0], 0.010071031) || !floatsEqual32(res.Embeddings[1][0], -0.009802706) {
+ t.Fatalf("expected 0.010071031 and -0.009802706, got %.8f and %.8f", res.Embeddings[0][0], res.Embeddings[1][0])
+ }
+
+ if res.PromptEvalCount != 16 {
+ t.Fatalf("expected 16 prompt tokens, got %d", res.PromptEvalCount)
+ }
+}
+
+func TestAllMiniLMEmbedTruncate(t *testing.T) {
+ ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
+ defer cancel()
+
+ truncTrue, truncFalse := true, false
+
+ type testReq struct {
+ Name string
+ Request api.EmbedRequest
+ }
+
+ reqs := []testReq{
+ {
+ Name: "Target Truncation",
+ Request: api.EmbedRequest{
+ Model: "all-minilm",
+ Input: "why",
+ },
+ },
+ {
+ Name: "Default Truncate",
+ Request: api.EmbedRequest{
+ Model: "all-minilm",
+ Input: "why is the sky blue?",
+ Options: map[string]any{"num_ctx": 1},
+ },
+ },
+ {
+ Name: "Explicit Truncate",
+ Request: api.EmbedRequest{
+ Model: "all-minilm",
+ Input: "why is the sky blue?",
+ Truncate: &truncTrue,
+ Options: map[string]any{"num_ctx": 1},
+ },
+ },
+ }
+
+ res := make(map[string]*api.EmbedResponse)
+
+ for _, req := range reqs {
+ response, err := embedTestHelper(ctx, t, req.Request)
+ if err != nil {
+ t.Fatalf("error: %v", err)
+ }
+ res[req.Name] = response
+ }
+
+ if res["Target Truncation"].Embeddings[0][0] != res["Default Truncate"].Embeddings[0][0] {
+ t.Fatal("expected default request to truncate correctly")
+ }
+
+ if res["Default Truncate"].Embeddings[0][0] != res["Explicit Truncate"].Embeddings[0][0] {
+ t.Fatal("expected default request and truncate true request to be the same")
+ }
+
+ // check that truncate set to false returns an error if context length is exceeded
+ _, err := embedTestHelper(ctx, t, api.EmbedRequest{
+ Model: "all-minilm",
+ Input: "why is the sky blue?",
+ Truncate: &truncFalse,
+ Options: map[string]any{"num_ctx": 1},
+ })
+
+ if err == nil {
+ t.Fatal("expected error, got nil")
+ }
+}
+
+func embeddingTestHelper(ctx context.Context, t *testing.T, req api.EmbeddingRequest) (*api.EmbeddingResponse, error) {
+ client, _, cleanup := InitServerConnection(ctx, t)
+ defer cleanup()
+ if err := PullIfMissing(ctx, client, req.Model); err != nil {
+ t.Fatalf("failed to pull model %s: %v", req.Model, err)
+ }
+
+ response, err := client.Embeddings(ctx, &req)
+
+ if err != nil {
+ return nil, err
+ }
+
+ return response, nil
+}
+
+func embedTestHelper(ctx context.Context, t *testing.T, req api.EmbedRequest) (*api.EmbedResponse, error) {
+ client, _, cleanup := InitServerConnection(ctx, t)
+ defer cleanup()
+ if err := PullIfMissing(ctx, client, req.Model); err != nil {
+ t.Fatalf("failed to pull model %s: %v", req.Model, err)
+ }
+
+ response, err := client.Embed(ctx, &req)
+
+ if err != nil {
+ return nil, err
+ }
+
+ return response, nil
+}
diff --git a/ollama/integration/llm_image_test.go b/ollama/integration/llm_image_test.go
new file mode 100755
index 0000000000000000000000000000000000000000..d0c861cc07e74e0ddc90e386156a581bcc644816
--- /dev/null
+++ b/ollama/integration/llm_image_test.go
@@ -0,0 +1,537 @@
+//go:build integration
+
+package integration
+
+import (
+ "context"
+ "encoding/base64"
+ "testing"
+ "time"
+
+ "github.com/ollama/ollama/api"
+ "github.com/stretchr/testify/require"
+)
+
+func TestIntegrationMultimodal(t *testing.T) {
+ image, err := base64.StdEncoding.DecodeString(imageEncoding)
+ require.NoError(t, err)
+ req := api.GenerateRequest{
+ Model: "llava:7b",
+ Prompt: "what does the text in this image say?",
+ Stream: &stream,
+ Options: map[string]interface{}{
+ "seed": 42,
+ "temperature": 0.0,
+ },
+ Images: []api.ImageData{
+ image,
+ },
+ }
+
+ // Note: sometimes it returns "the ollamas" sometimes "the ollams"
+ resp := "the ollam"
+ ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute)
+ defer cancel()
+ client, _, cleanup := InitServerConnection(ctx, t)
+ defer cleanup()
+ require.NoError(t, PullIfMissing(ctx, client, req.Model))
+ // llava models on CPU can be quite slow to start,
+ DoGenerate(ctx, t, client, req, []string{resp}, 120*time.Second, 30*time.Second)
+}
+
+const imageEncoding = `iVBORw0KGgoAAAANSUhEUgAAANIAAAB4CAYAAACHHqzKAAAAAXNSR0IArs4c6QAAAIRlWElmTU0AKgAAAAgABQESAAMAAAABAAEAAAEaAAUAAAABAAAASgEb
+AAUAAAABAAAAUgEoAAMAAAABAAIAAIdpAAQAAAABAAAAWgAAAAAAAABIAAAAAQAAAEgAAAABAAOgAQADAAAAAQABAACgAgAEAAAAAQAAANKgAwAEAAAAAQAA
+AHgAAAAAXdsepgAAAAlwSFlzAAALEwAACxMBAJqcGAAAAVlpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6
+bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDYuMC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1z
+eW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNv
+bS90aWZmLzEuMC8iPgogICAgICAgICA8dGlmZjpPcmllbnRhdGlvbj4xPC90aWZmOk9yaWVudGF0aW9uPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAg
+PC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KGV7hBwAAQABJREFUeAGE3QfgX9P5OP6TIRKRncgmS6aR2DNCKEKLqqpRW9FWq0q1dEQparZKF7VK7aq99yZGSCRB
+BhErk0Qmyf95nZOTfOqrv/9J7ud977nnPPt5zrz3Ntp0s61XrLnmmql58+Zp6dKlqUWLFmnZsmXp888/Tx07dkwLFy5MX3zxRT4aNWqUmjVrlho3bpzatGmT
+Pvnkk5y/YsWKXHfttdfOv/VauSZNmuRj0aJFSX15cIAPruS3adOmafny5Uld5dDkXP05c+akTp06pTXWWCN99tlnacmSJQGnUVp77VbpvffeS126dM4wli4t
+dK8RsJoHDvUXL16cy7du3TrjXrBgQS675prNUsu1WgV/AW/ZktSxQ4dMC37BXbDgs7Q4aG7cpHFq2bJlpo984EY/3vELB94k+eqjU36V1fz580OmSyO/WZZt
+8+Zr5jKu8YZv8pTgkCoMcnCgm17atm2bz+Gv8NWnvxUrlgd9S3P+4sWLQnZNc91PP/0ktWrVOst19uzZwc9akd98lczxN3fu3FwPLudrtwrelqcsM7LG95rN
+Qv4LF2U6XLvfvMWaq2gi90ahX2mttdbK5ej2o48+ymXokv7Ri/ZPP/00LQ16O3bqmOuwCbiaNSv8Ngs5fhFl2QPe1fXLBtgLutHrVyJnciffZWELS0KWytEL
+Odd66oDjHrnjpdoiGTbyL3DRAX3AT77xEzAW5nrwuY9m/DTp3bvf6Hbt2oWgW2WC3ARYZQdA8+bNW2UYiILU4T6FIsw1w0NAYaZ5RoT4KgRIwa8GgBgEEjC4
+DFJdB9jynTNYDqF+pQdDyqw23ma5nGv1MIcnuAgMHPfQWuholtKKlNaEP2heujQMYyVuTrT8i+VpUeCsNFIEueAFDWBSXD1nOO7PmjU7nK9J+uLzkE/AnRnX
+yi5atDgbcMsoN3/+Z2nK1PfS2i1bxL0mmQ+OXmlEO4fEX4eOHTJORiefPNdYoxiR8nTHwCR8f/EFY8T/iqyThjyjkdHBRdbkIMGFdrLiqIx5/vwFaY2ma+R7
+1UA5M0OjM7Dw59x9sPANDn47dGgfZVOmPSOJP2RF/+5LfjmsX/ckcqp0gkfv+GQDZF9tjyyc+yUbNLjmGHPmzE0LQk6u8Yov5zUYu0YvPGRGFpmfkDd+QvAZ
+F9jwg7F8+RfB29KcX+WMbvxKTfoPGDQ6HC2nShjBKuwXg126dMkKwBAiOA/CCRYBkAHaKhBSvnodIsKrywDBpVCplnWubFWSX+UZP1jKFYK/yPgqXLDQQyFw
+Y1Id5THVPBxl5qxZWfBgEgZ6CLdJtC5oBrd5i+ZRNoQWPM1fMD8bIyNcGBEXn40bRUQKXhktOASMdzRSgoNTukbbhx/OjOtmqVevnql9GHe3bl1DZi2Cjpap
+e/duaZ11OoXzvJsWhzI6d+6Yhg/fOk17590MFz7w8A0Pep2DvzgMC72Zt7in3DrrrBM8r53pgrsamJZEvWoUZAU2OLWMewyPQ+KHE+LBr7qff74sG7M6Ak1U
+z62yenBXfJ9FsGkaLR5HoAt6qLjAw0MNouo64ENTTZwWTDaCR85SaCgtkxYV33SmnFTpJidlHXQPPidaFHjR4T6a3NNCCSBgKM9e8Fdhocu5+5wK7ehUFr8f
+f/xxBL3S25LvkO+Qcrldd/v6imIcy+JG41WMtm/fPjMHISF/8P77YXALMnEAIFbkEvkqUADlI0pSFyMEDXltip0zTvkExckWMNaVzgaeesoQLmPW3arOUxlm
+OIRVIzI+aotBMeoTrnx4wMQXfGhv0rhprvtFRBtOMC/gaYWaN2+R+dK1+DycS3k0zZz5cZQvRt0BnFAeJc+aPTftsvMO6eennJwVWmRTWgmGKJqhffr099LR
+3/t+uvKKv6W+ffumu++5N+2z37Fpj123TLNmzkyd1umcHR9f8FG4rqdgwHnwQNG1C4vH6mRVT4xCGfjcw7trMip8N849DDDJrtZniM7xQz8McUG0SuS+NLq+
+5Coo0Lcya0b3q0uXrmFEjdMnK1tLAbYaL9lrAeCuhkf2nBgs5dgJWeFVYh/oZch4rc7iGr01YMqvOleX3XFK+iU79kEOeFLPffck53A40AFmlQ/+lXeNVvfR
+Cwd86tb6aNA6fx49D3LNbawKGMcI711rrZYZGCYh5JGQUI6EQIDdg7h6dEOi5akPsaQ8BolMs+saXr9gtwyHIVhEKYdQTGICHMpQlkDeD6emCHQU41oYDtM2
+160wlCcMNOJLFwhNaJTAnzN7Tnacxk0apQ8+CIFFfoeOneKvrkTrTN/cuXMyfjQZ04DHOVvHQcFahsefHp+O+V7vaGk6A/0/U+9evdK222wVrVW3XGZA//VT
+9y5tomWakV59+ZnUfO0eaY/dts+8MUo8zA4nHfvqi9Eh7x79pPfSVlvvkLp27Rz5c7KclCM/vEnkRYbyyBe/8hg/OZAhuc6KVptcyQ9PeHEfTvkSmS0LvgUz
+9+NGLqMcvLPn6LYW54M/yyX0AoZruoIPbnYwM4KFfE5vuCDRAxrkf77SDhly5YHNKYMH+pTQxyblK8d58PTZZ9EdjfLKgk8GyqAHTOd+yQU+/KFNK5wDRshB
+HQHAWJJ9tY8u6lotip2xAXXBwYNrrSacTQm6fft2uZIbCONUkGNeswspJhDIUAkVEgw5KAIw5xA5RyRBggGmOqIruBwVnEqMFkekd28ZZqKOuu6DRdBoqwZB
+mNVp4Q7zyTQTJhjKoo/Q5FV60MYJCYLQFy1cnAezTVY0zhG2jkeaNFkjfRKKUL9ROJl6eKs8wl0VCd+2W/ZP199wSx5Xde68TuZ39913y3Jj8HfffXemY8xL
+L6d33p2+ypnRPueTxenHxx8VrdkJacqUqenKq65PHdq3ztH//odfSDuP2DRdfPGf8phDj+C5515Izzz3Sho8sE+aMeP9rBfKZ7DgodU5eaOf/J37JdOqC2Xc
+x0s98AhWNXaBY01jreVF9sZEJjEWL14SjhRjthhHduzUYZUDkgVc4Ah04DvneA734FcOrRy04qTTpStth5wrP3TuUKfaolYCjeq7x07c0+XnANVuODY7U7d/
+//5RZvZK+2yWJ0DkC5r40c0nB3Q50EVmi6Krr4vLJ9hVjx49Mgw0uCZv+Brt8839c9eOsarsJgG46Rpws3cIQjxlOK9NX0NGCUOSRxgSj2e46kJeiC9llEOs
+svKrUNFAobWsusqgi4O4B9aSJYuzMEUFjFa60WywbHaKQ+uOEOr8+TFLFJMKZoWUb8J5o2yZ4SoGBHaTiLJpRaPc314UhiOBAzchi3auK83odr502fL0wnOP
+pf2+fWC65por8njt3XCc9dZbN3XtPjB9MGNKOurow9Mf/3BhhvX66+NiZmlJ2mzTTTMOfx599LH03UOOC8dpm/b/9l7puOOOybhqAfhv+8/t6fCjT047bjc0
+ZtEEqIURzUv/f3l0N4xPi9HqfpQILmqThyCGVrJirGTRIsaL9MDQ/CpDBytCbmYttcqSmT7BsM4GNo3JCF1kxkTHuqfkSTYcRyKrqj92U4JYCaLkpuyCGKN+
++un8fF51TIdsEN3orLYCpm4cmLNnzwrcZbxKN2wEPvTArw6cyreLY8rUqbm1gZfjVRzV/ti2AMAG2K18ZeUL9mTJWefNm5umTXsn+4BGSCBv0q/fgNGEvmYQ
+9nkIGIGYAQzTiKnRQblqyBDJZ6AShBAjrrYgZvGygYXy1VOe4MB1TlDV+8EDSz44tVvmPlrANIXMQQgLvqKg0q81roGLcpct/SK1DVjRXoZBLItAEN21EIKx
+SnXmFs2j/7xC/zYmHYIegs+RJcaJxkaMjlHBj3a4yAKdzhkrXuGkODR2aN82JlzapoED1k+7fm2XXF/5F154LQ0Z3C+1DmV2jan6UaN2z/cooVvXrlneYEq9
+e/eKaPl+8Ls0XXDB77Niyf2ll14K2TTJRrHBkCEpZp3T3fc9HBMbrbKC0fDZgtJ9IadyLItfA/fSvwe/ZQyaa9fOAJrDcIZPPpmX+cGHvLlz52V+Ca7qiuzw
+TS7krx4jIxeHGVCtBHmSjXK1LJ3Kd78Etfmruk/oAkdZuMkUHjDlfxF5einqu4dhY1nd02qH9PRZyJoeq/3Jq/b0/gcfZD1VfcFJZuQOJ3rhq/erbvkCvsEB
+b/r06VG+TJigV7lP5n2SGkOqn4tQwnGt+eXFy8IIeTRiJcAoAUMEXg0cMkAJqEYAMIx7uoahmMVCbG3uFy2K/nYkeZVRsCRlGLQmWpJPmHDoWoBLGcpwjI8+
+mpnvK2sw3DrGLB07ts+O0CzWPXRPPo3+fBZ08AKe+nhep9M6Ofo2DgESCD7jNNOs5ZKnbBWuuvhfK2jQunFowkcTmVDu4sUxuI/fhmnhwiURyRdlWrUYWkjp
+i+ganXHGWWmXXfZKb7/99qoq667bM+277zeyA8u8/vob09Zbb51+ceovV8ll1113SdMmvxN4W+RybVq3CZ21Cf60MsYrbbOMBC50043Wh34YBjrmBv0mFIx3
+QvVZH/ihE7Dw7aAn+WDRBXj0LcDg28Fu/AqA5KGco8qQ3MAgszJWKt1/QYLc6VMib06kxVCfY5jUAb/aoVlZa1NsxX1OiiaOXINsDW5owUPRXZkVxB9aqk2Y
+6ZOnDhx4c0gtAqZxMDs2BjZ+AqvaLR3SZlMZmNBciYIMliIgInSVEMLJKAPjFIFASCuzBFaZAINAwHFUBzWuUB9RYCqHeAqoc/yUprw858rVFkpdNHEQXQGt
+RvtoBfDw5ptvp6nT56Z2rddOc2YtjO5U+9R/wHphEK1j0W9ZsFq6m1qoYC1wl1m8tQJGs+DfDMyKFWumiZMmp5dfnRRO1jr16NYl06sV1D1jDOPfeCONe218
+GrbpJhEgtKAMrwQBvHaKxUXOiwfJDyMAv8xwWmcrEx4zZryXrrvuP+FEL6exY19P/fr1y3XQ16vXevmcXA866ID03e8elGVB7hJ5RRubloSxrR2LrYsbi+gW
+CGOdK1okk0Z0R+aMgp7o1DoNZzMm0FWzcLl2q9LdW7rU5EBpkeCNdibLnnx1f8kQDPxUmGyHDuXrLtORBK+ZRvTW8YV6nJY+S8Ashk/XDjCUn/7uu3mSg6Oy
+I/iVh6caOX7A40jyXYMBNtrpynKGQysMtrLsBw3KrHZCOomJpnBgSZliD9HafFp6SvLAltDEFyrPaG7KKx26AISCeATKQ0x1JERWJ6IkTiAPMcozIr+QMX7n
+fhGgm0FpEkEQrHsIAQMhDtcEUnHDBy6m9ZUJQDkK7dmzR5o8eWoaP+7ltOHGm6cRI7ZLh0Ykx2AR7JIY2L+bXhzzahr7ynNp8ODNUt9+6wbesvsBHC0j/Mp/
+GgJ74vGx6YfHH5jWW3fdcJgJ6aorb0t77Dk8RyKCffTxZ9NmwwanQ797YJ55/Nf1t0YLqEtYAoaIXVrIsosjMxzxau7c+alXr245AOCxJkbbrt3acVnWxGo+
+pTIeCe8ffvhh/JoIaFxakzh/4YUXU5uI/vRD1mRjXOcaDtcmBJyDoYdBvyZD6GzKlCnZmTikWUs4tNKClPILY8HbTJZAoUVFEz7hokPw4BBMGTkHhs89MrV2
+VoMXOdM3e1JfkGEnaEOva7Bck3ObgE0/bEEwdbADdgGf8nhRto6hXCsPHv4ki/bsAU26rmy24mTnxQbKfIAewxwzdVG30FS6w/yCDaMbz/jgSGChh87ByY6E
+KYUQ7KaCEGIOQsS7lgijElwiVYl0kClXBaSM+5QCudaOA8lz3WZlF87qtTJaOQLSpDNszX+NGNUQCMU5g7rj9mfSYYfvkc79/Zlpww03yBGaAhsmszEGpK+P
+G5/+c9sd6W9/uzTt881vZzrnxAAaLzNmzFjZ0i5JDz10Qxq50070n2Wx225fS78947w0ZFC/9MRTY9KJP/5e+v73j4t6jD+lb++3b/rBD08IesvYEg9zYmzR
+Irpbq1MEizXLDoGWa7WI3QKly+A+Q6C0xo17hHxX16B4rbtkQuSyy65IZ511Rr7+xS9OS9/61jfT25Mnh6xjRi4rNGbqYmxXDDQWciPQ6faC0yKmbhk62hwG
+y7qtdYZLK9Z0jTJV3ry58ULp/zcLp6GvttHq0gPZ0jGj0X2Diy7pSjl8WFvT/WZDtWXjoGyHPay1Vo8sc3aiDON0D4w8vgm7Y/xwyBNIlYHfNZ7YDpvUerDP
+du3a5zzyEuDVz3Jb6VCl3vIsB7jAZEN4QTP4aHPPssziuMafa/6AFrzCyXlMvKknHy3KuN+0eLaoWebiOZFmnEBVwHStBHjDFgcRjBFAZY1RdGsQhlhJeUpw
+1HP1ssOF0DlOxSNPPTQxAgnjEsGbGFi0aFma+s6M9O9//zntFlPM+rANE6YktBJs9+7d8zFypx3TPvt8I536y9NTq6BzrYA1PwyrWRj5gw88l84886Q0cmQ4
+UST8wP/NffZO9933QJow4c3Us0fndNDBB2Yncp8RDR48OB1//PfTkd/7WfrayC2CtsUxydE68wq/JKK3a98mR7rPYrq9UJdvxR9dLVPQZdW+5goYN998W/r6
+1/fMeE466cS0Taw/tQ7YW225ZZadRdBzf3922njDARGtSzeubZt20RX5JH0a4zfbe6o8yZjhiOrkWQykDPzJ2oIr3ZmN03rQIUdYKxxfeVEXz8rSN13oujHw
+teNgfORhskonl2Mpo2xprcpY2EBdQhca1KEvcMkL37pinMF9ToDmsj6k1V8z4JWxW7VX8MCBR1l2qx6YbBnf7rM/tuy63hOIBJ08Oxll8INuvKJLWXToorJL
+dg0vWsGQlAG3KaYRgGnIGLnCBqsAyqvNF68HkHEAZkqREAGyh8zOBEyAVR2pwlQmYnCup65rDmqMkreaBNPqYsCBDjDkgSEaGat89NGsdP21l6ehQzfOjKAR
+PId6NdVz+eBQ3q67fi0ZyB9w0OF5wgDudu1iKjVw9+vXN1dVlmOrr86QwYPSVdfenw7af6fciipUDQDs9QJeixamYmOPXRifCD79vRmZbnLlXNOmfhzdyvXC
+OdcIA4wp4qBZophlMWUeYSLv0cuZ8Uekve66q9JOOw1PBx98UKZxjz1G1dsxppqc/nzp39LQYVtmh9faM76msf4FJifS1VqwQCtQornAoKvml/Lfi/FZm1Ym
+J5pmWkXc6mTGFC1bakVjEimMjTGxAXzTB+eXqozlg8sIya4amfuMk42BQV5+ydhvNXD0wA82GrRO8LIPZeALVWS4yrIH9euEFD3BoxzcYKlLFsoJJGwSjVri
+alf2VFb7oSv0g48OTs0R4cKHxkV9B9tUDhz1+UMO+5orGbWw8QxCAFIYQSIFQh2aVdEBQkwoR4BgEBqB1Xx1IRUxCEn3ojKKeIQp656y8givGrKyYIP50IN3
+xoLkc9mJqgCq0bvv+HICRxl8OB80aFC65qrL0l77HJw22rB/jnC9e9p9vLolU67Cyr/LpmcF1Tz3azJ2+WD6+LTxkN6xhUrXp3lMWLyd/vKXv6Utt9wit2i9
++nTM24+sybz99ox09dX/TBtvvFG66aZbQlHNwmGGpWuvuyFosR1nWbrkkr+H0++ZTj3t7DRmzMsxqzcyxg1t8lrRxEmTot5tQU+z2CHROesDb02DRw5ovOPa
+NiLBUKvCyMjXWFV0V66l9aQoZzeBpHx1BK3SsmXR5QuYHcI2rNeRoW1cur261mRQ5UC/dOZgN+TENhgclTQPWuNPtiEBl4x0AU0YsSfGTp/qwtM07IFNsA3B
+29JM7daBif6Kx84D25U+iOlt8kMgG1QOzxyITYHPKdCmvsaCbbFL58qWGcfSc0ITpwQLv1pp8gEXjWCBbfYw0yoDEkbMCwHgKBAi1lw9obgGQB6BAapp1Epw
+JMoSIRAmAQ4uYajrQJh6jbRoMTCmoOVflIVaAmXMy5aVRzWMQ0TZ4gDL0yMPv5j+9vfLwji3zApEA5juO/f7xhsT8jy/fJFngw2GZPy1DMU732ijDdMfL/pd
++u2Z54WQ10iTJryUZ7oy4V/6Q6kpdcxw6i3wqxE5l2zYDcayUrp1XSedfPJPa/H/83v88S/nvH79hqY+fbpnFzYm3Guvb+T8HUbskneHR0OT/nLlneGUl6yC
+0bZD79Sze8f0+muxbahB2mCDLVLnLmUvGx3SyaSJ74aBTVxZyjrV0jRw8LA0aOD6eVdEcBFT9aV1oRvGQm4c46VX3kgz3n0jdV93SN5ou07HNmnC+EkBY35a
+f+AmqX+0sMZ4JhgkemYfDM+5NbKPP56VHnv69TS4/7pRYkV6d/qHYR9rpSlvjc11OnXpG3B65qBA14yULhkoudaWgR1J1TZ1K9HLds06OtiblpnDgWFJgwwk
+sPAEnpYaLvrjPPTPpt1Du1/1wGf7tZHRg6o8wosWh/JsqqnoYDoaIsqEFFEQTY4BLa/lWPJU4M2coRhnWZMBWFK2JkTUaIDQ99//IE8hEzanUV+yaKpcZj4E
+IAKpq1+KIUJD09Bh/dPeKw0NbdV5/Kpzzjnnpt/HmCGlWC9Zu11aGq3DKT//XvrpiT/OExGF3jITBu+IEcPTn6PVgEsyWP+qZMtSSrNW0ftVZeSRH8W0iXHM
+A/c/lTbbfNt0wHf2TUNi8ZRxahl0/QhewHnzzTfTLbfcnu655z9p1932zDTusedeWe6vjp2UDjn4W2m/mMwwdpk6dVq6NLpyAtBLL4+P8dLm6bxzz8w7zPH1
+0Ucfp3/968Zo6f4R48Y9s0E9+khMjpx4ZLRsF2dZhghz9LzvvgfSRRddkLbbbqcsQy0M+fbs2TPjfuyJ59O2W2+Sfn3aT3LrbT0J7crQ28yZs9Jrr72WHnzo
+8fTiS+PTTiO2yjJkF1XfAuyDj76Yvr779unIIw9JA/r3j8ks61hlWUBZOwOee+75dN55v0/rD9g49e2zbgTzsimXExj4M9xqF87h5wjsCQ52SOZ01zLkxBGq
+ntkkx9StY5d1AkH56mD0pqdlLU4DAB4erBMasqjHifWiJDJwuJbvXBk4I+A2y1EUQkoGCHAFDdQV4o1meiw+IRAxEHMuwlFPl8F99eRhDOOQYZwgOUD1eoTJ
+V0ZrqJ/5+edlAyziXINlXeb+++7KXaU66CPc6kxwn3HG79KFF54fU9V7Rb1irMpc/c9b0+x4Hujiiy/MuNRBD57Qf9CB+6efnnJGTJ9vmfPR9OWkrLRgQXRr
+ViZwakKjhGbdpclT3o2u2Vkxs7bv/3MT67bbbpO++c1v5n14Z//+j6ldGwuPAs5Hadddtk+//vVpmWaw+4ch0os6V199TTrwwAOykblX0447jojWerM8qzhs
+k63Sww/fEl3GHevtVb+77LJLsg/wlJ+Pjoma6JZHy89ILTC/9vqkdM7vTk3GY3on/yuNGLFD0HBgdDFvTr86/YK07VYbZZmGaLMu581blq676o8JTXoqX5U2
+2WSTmPzZOx1++KHpoj/8Kf3njgfToGi5LGbTnYDLvhgtWuiBvdA3mRtvLlpUNloXfazI9lqm8cu6mTqm9+lcAo/9sk+tCccJ98g26b7yDk4ER7UV5dm8Vqra
+EPrITTn3m0Jcu3I8WFPHudhKbc4A5ySSMtVTEQMQ4PI0rZAr07J57DSOSGG7zfIoo6yoLVVBFQcqA1iGoq9K8GAhFF70pDV6ps022zTXrX8qQ08//Uw40QUx
+rb1ftJbl0Qx4ML39NsPS5Zf/NX3jG3vGDNgeq4RQYWy++WYxy8bhS5ei5jf85dBSXQdzjseivDJekOd6bBjiWWeemp2o5i2P/BXBR4FT6lXFar04RY8ePdIO
+O+yQ9t5n3zTu9TFp9G9+kY2GzCQK699//XTxny5Jhxzy3ZznHjlK5MRIjj32mHT/Aw/FTvQjsxPJd9RyaCTXXXbZOeuNQ339G9/MRjLmlQnpxuv+ljiJpKy6
+fhvWr7R37NghnPbYvDv66ON+nvbda2RE/7nppVcnpvvvvjH0tVmGU2GoBw541UjRYsz6h4vOj8B2errkL9eGU24Y9YrMazm9EmW1JH7xzvENKwRm+eBb8xEg
+2ZVxjW4nubAFjqDnInBXOPI4JpgcVjeOczlng+6h1wFOpQcfxpt4U4a9N4a4RKT5ubLBsxuI8atyNW6eV8tXoSBCs2naUXkM6S5qvh0ijGZaUhaBZoCUQ4hf
+EQexGEYYZt2zcwGMoRt0D2OLxwgiKS9V5T4Smzx7rDsgO4N7hIxO50ui7zxqj31ifejhLKBKc4WhhevXu0d6+81XM74M+Et/tHASumpa3R7pmsZGxu6DQvjz
+0/DtN89T1sqRm6SbQr7gwE8Gfh3VyIYP3z7WuP4Wi6TvpnW6rp/loK6yDgmMb+/3rXxer/EBjntVyWeecXo8NDg8l6v3ajmw4JRM9R951DFpXhjb9Pc+TMcf
+d0iqTlTLKF9oL/S6rrRX/vb/9n7pR8cdHN3GmdHVG5cu+P2vsxOBIYgoX+uxKXoGs9JCX2zi2GOOjh3tvWPM2DfrTlm6B4d9sQ2tCVtUl9M45xTKyJOM2+St
+HYHZw5V1fRQdJmjA0bJxGLSwRV3XPn1653tsmNOAiUfX1R7lo9ehOygfLOWa6tIpgBCzH/bXAYJIQER3h/squSfCfhqRQB6jnfHee2UNIaIAQqtDIZTXE1Ql
+3nVD4SIKIeASHmFhTl35unsDBvTLXZssqfijLBgijMcK1u3ZJUemWh/tyjAgfeoxL72a+8rGKuBWR1Ju3XV7ZLBVERVH/dXNzSnqfVUiCzDfmDA5Jgv2yPwL
+KoF6FZ6xY19LTz/9dHbGbbbZJmkJJTzgEe6dohv2u7MvSB9/8NYqNOAqIwk2hZ8yneuarCs/YDg3wSLh31gMj+Rfy4HnHp3vHM708CNPp2lTxsUs4hm5noCh
+TMX71FNPxS6KMVkvI0fuGLoYkGHBV2nf8+uj0lXRjV4jHvLbbrvtMhx/BBGJjV151dUxGTQxdNs27b7brtmR4UCXtG7sJtlyi01yqybAqcMO2Au9sh8J7RyR
+3RkueBhVC6KMnhX63VeuSV4GKPalrqn+CjMQx9ixU8b/2Wd24JRH+/W8WrUqXcjW4Yz272HD+A298FZ/0Kiw39y4FARlU6PFqRaNV284RJSKPFw3j2IogEIR
+S0wYsK2Cd8qDRLIYatrSFCtGK3OUqg4lVGURZvVsCnK/RIamgXNuKHlIpgNcuKvw0fT+B/FkacfygJYogz6bHBm4+xxhwpvv5G5A+NF/JTNyHfOetdKV+a+b
+Ky9W6nkVzpxdM+PC6YLYQrPFJgNi4XZSjnRkVtO9996Xd3vH5v/IIptlafz48Xkxl7LxIzGCDYYMSO9Om7QqL9+IP8qRlfdBnHvueemZZ1+MJ2x75XFU3z59
+Vt2v5cn7wgv/kO684/60TucOUeesvPujOlMt16NH93Cit9KwTbaJKftJuTUlv5qMxw477NA0aMim6eOZn6Q/XXJ5evSRu1atxVT9dQ3BLoz1ss2GDcyOoj6+
+qqPpfp/00xPTJptunZcILjj/3PSPK65Mhx16SA4iyr/zzjvp3tjNbuHb+IfBCxTg2CzKHtgQx9JT0dUG32K24Mv+2A37wT+90416DJ3dgcdG0A0+J1CHk4CN
+d0MJ+I1Xl0Q+2y6blOfnyTg40eRg6/DC37huFjU7RdCcRQHEumaQtbAKiFEHITZ71oiHEMD9MiRM2FHOGdVxIBQs46GMPBgSWZXBkCQSORfVLSZab2gdmzKr
+0nKhlX+qgZWdGGWhTzkLeroFWiNN8NyZFgRXd80qDC89MYaTCO+rUsWLjprwWRMZfTZvaizWrp0eefTpdMGFF0XrNCFmtl7PM4mjRu0eRdcJYayZNtq4LCJP
+mTK1Vl/1ywBssfmvFHjAr/huvvnmPMvVuXOndM3Vd6Ybb7w5F680Kivdd9/96fTTf5P69F0vzu9Ml1z65zCs1U67sli8kwLP7WOQ3jn97OSz099jecFs4ph4
+ZOPHJ5yYnWj9AUNj2nt63sQ7MZYJbKmSKi7nxtQ9unWIMa4F+0KD/EqXVurSoKFv396hy1Zpg422TEcecXh0LY9Nt97673TTzbekn//8lzFe9S6FsobJDozf
+4WHwbNKajXytB3tyj56rjZEhm1A+XDk7D/uUz/Y4AftTT52pU6dmp6vDFffqzKEH+sC2gfbdOJxLyjrgAYO9G6fFmlsZt/DcShQjN3EAMU+uAtFXdY4QjKjD
+aRBQnUFddTiI2Q+/ooCkPHzVIQmnNssijXpwKKffyvj9atUQrm7DxPgZsIF8EVaz3LKpXxRgIDg3NY+mumGkrTDwtzqtNoDVeavP4K/JlH1N1ciXxA7zvn16
+pvMu/Fs69Rc/j9vrpF/+8rgwlNtC8PGUbhitPXhW7G2KldRFp+RX4Knn5ST+hs8qB78lhP4xVSzQ7DBiWPo4pr3JHW/qV1o8TNh/4LD8qMSIHb8Wi8ExVo1F
+Vj2LUm4lzsxTaf1H7bFdOuaY72W022y3Y9orumuMnE7qgYfevXrlMtUmXORxSQTeWXNivBXbgMy0ki0jg0937vvfPy4dGi2QQP3BBx/mcq+88uqqiZl9v3Vg
+7mazJbbFNuClyw8//CgHdg5SW3tLNXTOqMmA7VYbYV9wkxOZgGkii5xcgymRhzqu/brvV0+GHeolgAEnG2ar+OGc4MPrOuPjTSojXkuBAcgIAiDEA6LrVZG4
+VpYwOUAVWGVUPiRaBoZg9g6j1Zit3RBSUWosd8ZsifEOHJgCB1wG2CwMz+Pa8qvBZClkQbSMfq6nd0WSMkFBYGgnJPTPi4euttt6w+zQtV79tZovikpw/r9S
+VWAus9qPVgUZ6z1jX5+Qvj5qRBjNDdFNG5IF/r9gFt5XO5Jy9v5J7kmhhv9KWuk3J01PvXr1yDLxuETDVB1Jnn19dp9oIbSmDe+tBL+yanmf3d13PZJO+MlP
+0xGHH5bWX79fNo6GsBueV9oz7JU0egFMp04d0t8vuzw/Acye6AFeduOXATryeCh2fVgi+MlPTkjPPvtcOve8P4RjCIjlvRycNkf6MNgaKI1xOQX7oWcTCeyC
+rTBmNinpXTF69sTZ2Cv9cTo0uSZrAVp9tsi2HMqzZ+XYsXto51BwlABSghY5KMd2GlfvBUCmpADmEYNIRCjHyZwrC7Dr+gtYdQ7E1cOGQAS6V5GCjRlCYcxV
+yGC5V2gyQ+SJ0DZp3LiJmZZMXPwpzJRmduONNkjPvTA2O5168BAYRYrCn8Rs2qbDNo4I0yFXh6sma1+T3iyD+2q89V79reV1EWuqefXa72uvvZF23mm7dPEf
+L4pB8xarnAg/X5W+CoaxnfS/aGFkXWNXg0khL1VsSFNDHFb3Z3zoYb2y88AOkv9ORQYcqmuPgfGA3/x0/vmj8ybYDTfcIMsfDf+Ljq+i/dP50aOIx3b/9tfr
+48nei3JgZCd0VX/JQoBlKxV2x44d8tLE3/92cejLU9Bla473ArIXemTYnEpLQX3y6Jhd0Xk9p3uHWWL5yknsCh3smd0pD5BALYhzOLDwVXpBpSVk31pPLT+a
+0aHM/PkLsn3Xa3VjYqX0JSuw6lCVeQRgGgGmtUX59dZbL78Jx85fwBwY0ApgHuEEJg8BYIHh2m/ZxlEilToIzi1jKJ4AJNFUXbu7X3zhiRiMvpvzqwKqge66
+69fSJ3PeybDhBs+qt6c/1Z0777NYYNw9aCizVfitMN6L2cZ773kqtV+n76q8jKTBHzAl9dX9cmKszZr3zpteTznlpNzyoTvTF7ySnxeuvBQvPrFP8O6778kv
+OQGn0lFhMpSG+V++r52yN09LNHPmrKz0Wve/fwud5GrSp8p0dZnS4uHpg/cmpt7r9UhHH31UNqZKO14ddlWMGfNSTHA8m+6//4HczQIHbQ3F8fEH72Zed95l
+q3Taab/IY+gbbrgxTZr0Zh7XgEsWjI69VBjk5Bg0aFA6+aQfpSefeDiMssywKaunorfBdqzdMGy2WAO6Fk6LBb58b5VVVhLM4ZT8KkMmbFM9j5iwVS2cfPVt
+MoazOrBuarvY7qS169OnT66HLjjsRaz8NGUoGIEEUEwC6FwyRgEU4ZpSZfVPlbNOQElgaLk4jHxlJQS7T+gQKmOHRMuW+uqrHytGuHJg124fOMrn1LhLeuaZ
+Z+Nx661WGXMV0JbRRTj3vPNjsHxS2uVro/LiL3p7xINlt95yQwxiT831wPmyIzz3/AupVbvWqWvnMvYryP77LyVJZILmLyd0LF08NW2//fdzlwWvaM9OEJb2
++ONPpJ+ceOrK54YWp6mT30h33nlXXrfIZRoArHJT/8tJWe8ucI8sjWG/XN/YLcw/Nq7GWHLp+5lmxuR9fV8uC36Vx6hRu+boTP50WPNNAhx02EmxITeeDo6u
+62OP3hcPSo6JWbvOWVdVBwEp9V1/YI7UHqWwYdcevgMOODSwLImtTgdE8O2Zd2hYr9k4Jl20RGgCA15p6NChqVvPQdHy2ARbuoL4FQx0a3Uli0OVRVR1TD4o
+oyHgDBKYYMsTnNShv2rHbL32mJTVZaxOZhOv2dzqhPKr3Qv21Ufq/Yqvqe6VGTjMcAjX5v89EiHitG3rmf+YCQsiEe2+iG9GDQMO/VX1EUUJDi2DX4x4k2mn
+eAFJxYMxhKhj1dqEBII5nhZPQguDUX9k7I6+4sp/pW/H4p8nY92rrRwcPzr+hzli/P7ci2M6d2quv0FsTD3vvAtitf97mcZKX/3lsFdeeW3aYbth6a47b8v4
+c8Uv/amzeRQRcfhLd+My8EsMo2FCl3TbbbenV15+On17/wPjYbwpCXWDBg3M95RpaOD4tVewpgrDtXO0oF90bBePazhnxKsS8uLSTGu89yg/K/TmvCmpd691
+c/1arsKtvwzpy4nBXHnlP1O/Xp1ik2u/NG3a9LTTyN0DVq9clK5XJ8EjJpHCZv58yUW5dXHvoNi1cebvzkl/uviiNGSDzdKtt92bPl0Qzz6t1TQ9/+wjeVq7
+8FAggRHE5zfc0jHdszt0MmbycXAIMmCL7tWD46CLbtHPRmpij+7Lh1P3TvAGg90J2vApBz59wyGPjcJb67rPXuEFC57GraIiQNWDFTI4LU7ROHe5bDBk+Jjj
+BJ5r4Z36k1oTgAwgOQrHAhzSOmXoeSOEaXbBNXMDlrx6oAEs1+7V6Xjl7QSfv2BhvATk+iwX+ODACLrRfGzMOD3z1IPRhXoqvfjiE+meu25OHogjGGXBUVYd
+yYzUC89PiPxyXfPzzQZ/0CKBUVND49cCSOhvmGqZI444NO37rf1DubPj2aaBuWXt27d0Jb+M03vi4mmk/3KuCpOPaHE6RDcDLhM4UTDn1jIVHvlsPHRg7jFk
+Bw8SKz21bPkttDd8WQsYyjKyn5/y09Snd/fcNR06dEg4xAU50Llfy4HjfNKEV9Luu+6cnch98rKw+rszT49Nsn+M9ZgWcd0hnvhNaccR22Sd1bqFFq/u+iS9
+/96kwF/sBwxLLb169cq/+GLQfvWK2BhZsE159RztumOV52prxkgcAFyOww7rPICyuntsiXNJ4MHHpjmV+9WBBH/8lb2KUUZTzCkMuhCgcm0xOAoAVoCtIBMY
+QMo71xWRjxGeizhIa1LGPUgxqi5HAR+BiK+Og1kMutbimTp3Xz44nh065ZSfxcr9BnmBU5574FaB2cXuaJjc40RgC9cc58knn4qW6hdp91HbxfM4xVGU+aqE
+ZqltPNtTE9wVp/Ge5ClavFIEXBXeRhttlK7951URWcubTHVT/lfy1qUYHWYH+D9lgg9p2rszQlfelxBTsf+nUMkgw7GvTojWW5cl1vFivPlVqe7asJt7j1jv
+qnz5lXbYYXjadNNNsk58zYJeGqZazm/neG+fXegNdUIO7OeEE36U9t9/v+wo5OLhSq1DlWGF+Xx0tXccuUeWI1kyVDDANE6yxiTfNTtjc2gynjax5LHz7Bgh
+ay/7XLoiglsc5MHp4KNPrREYtTUSbNkRmMpJrtl31b/fyg9+2S97bxZraDYrN+Y4WhKZiMwzGisBdevWPTsXHVZCOIGEKMoE3B4mTTDiIEAgxNVhOF6tr+vG
+YdXHYHUw9zGCKQJBB6MEEw7RaqeRu8bEwSGxs/mRrPTqRGA5lG14qCffLwVyIltehg/fNxkUG1iWcUn7XAZfyjb89V5wqfa/nVd8zhs1snWqbzwO8WhsA3pG
+Vk7oqLjx6+sMnEj+e/EELWW4vzrF66FC/p4ZslAsVfrzhT9RvG4ero9I577cygKVdg8QDonA431+dOBhwYaplrNlptna68VbYm+L3RZvZJrca3jQhzUVBstG
+TJygXZlKP5rW67FOuuXf90SLW2RQAwk7UE6AGzRoYGwx6p+dqOIAS9lXx45Nl19xbXxep204Q3kuiN0YThg6mBggOy0Reerq1YP92YWgPFweaFwSr0WT593t
+4OcAH/c4MOeGl+3jjw3o9SiHLmVy+RAae1ZOb0mq+WjS42LDjsYMGACAJb8Aa6E4Vm3mEO8asYycgtSTGKRyjG1evK2lejg4zqvAXSPMGz0JRB0EijZgYgI9
+8MAnUWKFIyLtPmqbNGrvI9Jf//r3DKMqAi0cq+Ehrx4UYlvK9ttvn0bsODSEUx4rnhUvR+nUpV0o8rWMT31JPfw88cRTsYVmq/TAw0/llXX34KzlJk6cGHxM
+jlm7HhF1j07PP/98rut+pU2dmm6++dZ4dqh0Ud13SB999GF6KHZGbDxsq5jpKlPyFYb7Iu2rY19PG26wfsbdqWP7NG78hMgvK+5kjGZKtaetS3zx4v33Z+T3
+/D37/NgsK3Ckagx2YKy91hqpezxpe/pvz8ovVIGzysxvTYzYg5UmT6RaxrmV/xeefyJtMnRwPMZ/dAS6h7MulYGr8ljtoNavMrSOdPyPTso7Gzhq1TkDrkYt
+n62wJ70A43YOIbEXAY+jsyF45ZGFPEmer4uA6YU47hmTsbeSik7BZKOVdrbZ0B61gnpYnJhMjMfYcpMBAwaP9hpajoFhrQBkKiMCUJ4HsHsQE4j8ipR3ugc4
+4binP1mZ4Agiky0ejNOgGTz5HKY6KcIkMMCrzSl6lJUvWm22yZB0+VU3p6efejKYMegr06V1vIM+9DO+yZMnR2txX/r9uRfGw3BnR3dlZHQJtJ4l8kTRmOHr
+HN2vu2LXwLrJ++TQpu4VV1wVW1suzrNNZsnmzJmVF1pzlA9FaIFOOOHUlY8M2MXeMva/XRr9+ZY5wlEUHsBitH/581/TBedfFlP50+IdDr3iratlkP9hyMWb
+ghaGbDp1ap9uuuXuNGhAn/ywHXmQ7+WXX5H+ef3t8XTsOqGD8gj325Onh0yWx9hrcJY5ed8cM233P/BozLJ5+1Os+4UcvVPi3cC54YYbZD1bq7GJ9gc/PDlt
+MLhPlvXEcN7b/n1n0B1rMRGR2QLaGe+rr45NF170h3TzLXflbTv9+/fNYyX6evvtyekPf7wkXsxSPuMD7+9+95fQ8+yVeinrgeA5GDojtsj++uuvp6tiP98J
+J41OA9bvFW8sja+BRHBlP1pA+uZIJcB6cWl5Fx/+awuF5+pQZMXQa4smP1BmeOyJDbENdMMBdrVXDl9bHfaoDLw1waOMg51Vh2Ur4DTyWRcCg0CharSUJ0Hk
+vkggcQQEcRT3lAdU3eo86iqj9SK06r1w1AEbxtRh8H6VI2jX6tb7GKr0MUj9ZjBEinfemZ5eG1seud5++M7RKvSOuX2tUJO8jvRhbO2fEI9bz4w9Wzvvsm1W
+BAcSDKpiCcI14Tz26Csxhb5lDLDXi2nel2M88nHacvMN86Mg0RGLB9/eDNwt0sgdt43JjwXplpsfCcccFq1piYyUY/Fz8uQZIafF8Uh7v+irR8sxbnx6+aV4
+J97m28XsZYf8vu4nHn8wfXPfb8fO9Z7RskzKhqOLgkcGMH7C22mLzTaKl1C2Tc8+Nya9NGZsGrnzdiHv8vgAXOT76CMPp+E77Bhjx0FZHi+MeT1ahoEhszJ+
+pB+yfWf6B/lFmd/+1p4xCzUzHsr7VzxisnfWKX0xOOOg++/TNZsdLf9eEeDWjE3BH8X3pj6NcVajCDh2FixJTz35cDriyO/l2d3/3PlQ6hsTEt5w68sVbMGW
+oGnvvJffJ9i8Vbe09x4jo5WJ97/FUooZRb2Wt9+eFu8RfDD16bdhtLIDsz51eWsLVO2L7iVwBXT32YbEnhbEjDB54TEui15TeSMQ+/xsYXn3vLIND/fIDzw2
+QE5etmmTqnJwkDEHQgP8AqiJmfLUdPENtpMddqutt1+hhWCwjCt7V/zWaWlG7Z5fCCQINGkIAFxTCwYnANhMCwJqBFKPYBCjbiY6ytZ8zinBDZaBOdgcrEYo
+9yodlTFwRCn19fc9W8PQzGwZv3lEwuZb99W1Z8vLMgwQ0SnBI4qByQDUmzPHZy1bRAvTOu7ZA1i21TM2OyTsSO/QocxQLoztSfgEhzIpCK/WyzzoVt7b93nA
+8uRxbAD+bElqF3U5pJk8Y5wWsSPAm0+9ow6/ZO0wW2q2cr11GWqr/OgKmcAhaeHt/MD7jBkfhtxjCj5mvGhJ94g+6EFramHRxMPcufHSyAgaPXt0CxmU7g+c
+ZNm9e/eAFdu5Yjy6NOTIqclA3XmxlcpYS5dfd9JDlJysezevBGiV68PFqNCu60zf6PXev48/nJ1mR+vWLt5V3nItL5DsHpzHv6DF4O/TkDu6G9pgXGaavc2o
+2gOa7NDu3LlLyG+mItnolcEHB8gPlMYEBH1ZvKbbaismx9igPAGkOiw4bBme6kBg0Sm+5Bt+wK+OXzap9XLeaOiwLfI3ZDGsgISgamgQmiAAVN9QsrXGuEhL
+Y4euRHgMFhzlEE7h1bkogDGDhzjnBE0oBoe6l5zAvdrVg7PWwRyaKkN+TV9K6Kj3LVqus/I7Re6LOvgCS9ONNzgkdShepCNENOrvahks/qGVExE+OpShaHxm
+eGFcIr9WEkw0gQdv5cNgl1Gt0SwG9muEDOMl+3j+dH75/lHLlmtnRVQ60VcDDBrhhdOBHnKFp8oPzZ4WFeO0hgIaOpVTBp0O12CU7mZ59xuc8tBM5s7pS1mP
+NdSuujy6RJdf9kDPegUCAfiMTT3Jm4IEEHKtEwHWHZ173g0Mzt8qvkEVb8HJsIrDl50I9KKMQOHpajO4cIJHl3CTQeG3aZYf3IIcGc38uLwvJOOJZ6T8MvYK
+lx2Bgz+68tLOL0LfZFNtDxzl8Yw/+iRPsiAn+OWBAXeTvv3WH61J5wgKYkg3DnKRup771YzOn2+ae1EG4D6i1K1O4QV+ALuX36kdzgKWPq8yCMMUQgjEFnye
+PTcmA+aFoKyVuIcJSXnn8BQcBRcclelaNtMTjBK4soSB2SoIBoaW6uzwMFyJcNBBOSI4fqshikTwo4VyJee+5mAcgrdKp19w0ZZDbJT1/rwu8apg+wrNPnIu
+kx8SWSjI0NBc6VscdKDNATf6HHhFl1QjZNFb+YIIujmGg17Ac1+3ynoeeVenMTuo9xC+kN/EAyZ85COf3JyDhSew8KwX4Bw/6HEP7c7J2Lm8InebQst2HLOJ
+kOklfBEPbGqh8Y5O+kRnhYcOsDgkeulSUoYxu1+cN7auBW8dwomyQUd5XyIBRxK0qm3jhQz9gknf4Nqho7w8NIOjZ0RWUTzbA37oFt/4Y2sCqrJgNBkyZKPR
+biBORo3ezgGHjMJEL01hJVB5RHJCCJSnxAoYUXmHbTAOuboMHFPguTbdiwlEwUN5yoKBYU5QFVuZJsgqcH1aeNWnEC0bXmzzkK8OwTh3VKNAr0OCA0+Uhi78
+5ygVecqg2S+4aKplfM0hKzobWDEytFZFWVT8PJThq9/V6Hz5gfH53I03vGZjj1YKbRlWtHxmEZUnP7JSBk0UCTY9uC+pJ48uRGwOJ8lTxsFR8QiWa3yQvetq
+UAxCa12jq3zRmVzJjxyV8etVxrqtgim46K66qS2cPLiMsapd5S+ehxOpK6GDngVO+tS10/KAYWHaxlM8sykwJNfoVhd8PQFy1lX3Si5dUu8/52D01irGruRo
+ls5LM8sXV8p70A0fvOu77rbJCLTqQR968Bek5USe5FDxuyYjtDnHf5Nu3XuOdoFAXTKCZxAKMSDCIlDEA+SwJiBff1pXQtdF9wcyCCjULwOtTLsHDyLBsIBm
+IU0+7/dLWcqL8HnGKWhSp8JDsIOg5CtnYoGxEaxyjI7A8IB+SpWvy8dpwZdPiZwCPGXR6hcNhItOsMjFUY0ZnT68ZXcyGAzLh52XfV6cPkf36L7g8fOch9bo
+hsS6RphClosuTlASgSSmYJtEfz26tO4brNvRrYsbJOYnjNGUDS4y0I0W4x8tGr7kVecmE/zi0T0y0fXzYkcwGCS9ue8az8pwIvzWpAz4xhpoW/BZeeGiCRm9
+Et169VuuFWsvYQMcmr3gmfXpDsOl96IsuuDUEhuX6TaTO2edNdMrq2OhN/jQQuEdXVpPep41e2bWMdjGVAIR5xPE2R/9m8Ej28aNS+tDvqX1ixYs9KRlgYPu
+ygfZyk5vfFpS4bjgsFnl0dE+1jKDpNy1RLsDH+pwdjInO3l+mwwesuHo6lkESXGSCgpVgWstquBFbIy5lwUaBleRUQplUzAYDJAywfJLwCYyasRVDgOEhg7n
+CKNY+BwEoL4EnnvwOXffOWWqB75z+RJ+ssAiMlFepQ04tIPrPrwOBohX+aKde4wBTHjAI+hKD8dzyIMTD+Aaa8Enj3EIOBRuYoHToVUgoTB8VDrgcE9iaGHL
+WdbkQw4OMhZELFy7pgt0w0U2tSwYzr0nHE1kg3+8OJTFn6N0UYu80cJQBAm7FSpfunRkZDcMw9OFUgZcLQCY8JhEoWO4BDnwtUj1wUWyp/fZs3w1r+jfs0Ho
+gRsv4Hxm3BxByURMluPS0r3Duy54CRreoOqtwPbele4wmZCdXz0m5eCT2J1rMnbIR7dE9mjAI/xVx+Gj2YmVJWt6dQ/fVVdNBg4cMtqF9RgMKIAAzIt8zitA
+nlgVpk5FiAhltFwE57y2LroLCKDQOi5w30qy+u75ZaDVoDhq25hBwzDGwGSgxWBLS0Cw6mGKcAgBk8rUVAUFrvOsnJWtq0E+5cuvDuk+XHiUCIpBoVc0NrGC
+HjNOC6NFVVfrhTZ8kFWFV4OH1ghd+CVTkRJM9PtOrTUP9eBChwQWmHZQ60LqworkFM448YtOdegEfPKr8oSHvOmjBiy8qiupJ+lJSPJNT6OJvtwHS11fw/CO
+QnxpBdDGkRivBU4GjS740USG7EbCpwSeaWN1Jc4DHrkKLuQNFzmoT8Zaa91fOLt27ZZp1Jpo/SufelCCEbvSqhT52zIULU7oi/0IYPCa8ofDwb7wDD9Z6RbD
+yYbARpt89Tg4+sCGx33BQ88NrWBk++rRc73RIpwmk0AoYO0QHOVIgKngGvCaR3CIMWXMGCCATBdKOcgJVF3wwUYggh2E4FodDCCm4mKcmvbKGJwMxhqE8nBh
+wH39ffAluBgDuGhzTrlRNMrNy/eVcR8MggZDWfxVntHiHJ3KK6ubgi/34AajCjsLPMr6rcYNprrkAA651qlYdY0BoniGozuIL2XIFfwi79IVg4eBmxxgiq6V
+lZQlW7zUbrlr+OGp/Fae0eMc3+TsnRVwyxOEGD960aElAN/snICly1Z5zl36oENggAN+sOlJHQZLN2gxlilGzchLKwWPc9t4tDCu4SRfQQRNYHAKegSTA6Lb
+UIBe1Zk5c1Z2DF8eN6mBjyiaFsdH0+ijLs7WCRI00hG6HFX/gg4eq65cV1jo09oJjmjxsCr8dCJASU3ad+g0mjMgHCCICAZDKlXEujmQYEg5QsColqxGMEL2
+cWBlwEMkhYOjDMbBVpehYFRZ91zDpQ4G3KMUA1YCMltGGZQND1qUVwPigIcAAEAASURBVA8M5eGpExbyCKPcL04hj5I4BiGC4bziRjM4DsqKnxzJRF00GRui
+3bl6eIOj8skQ5OMx447WSGQETz7c8HrosHngMgbBo7LKkANnyLIL2oKxLGvwlcG7X3ygvfKOb9dgUy6Dd0+9HIACNl3Br35WfPCAXvISoUVsdEv0K5Gzbike
+wNOCqWNWk97JB81wOOAjH3qGDz0CJprQXA+81kCgJQETTnLFC1jZYYMGRuwcvFlBp0kF/FkTIytrbyYbjIFMENAbWuN/1I2JiZALvsgHfegFj4PByabkuY8/
+sq+yxYvgLXgF+Vl+YHEeOExWuJdp32ijYaPzYDiAKYQZrz8qxlTGHioRAsCMl5O5X4RaFlYrsxh2DhaBSIgjUDM2lUhCAJeDilhVsFWJ4DhEPgqpNDBIjBMA
+uK7BFiHkMRR01TrV8Ny3VqVvz3gITfcOnQSMP7+itPqacfhtl6nO41eUhV95+7vyYDZoMMCFXxm0OldGywqmrqEPjenzk4FZpEAXNKyVaYeTbHUbReG5MQMF
+ppelKM849ftrt4hc4aBJ3T84GWFJxfDpiIzwAT4Zkxc8aJPQhW8BCzC8SVVPljDKNiyL9B9l56EjMFvFuMQEgq4Rm0ADOasrwQEvw6tyq3aDVvfYirU4Y0pJ
+XbZBf+TrlW5Izbpp2z4HE/ySA7haB/fYCVhgcrI8vg3dLo6dGOwQvWwbneiHn4OTDzrRxfHYg+6jaf6s91iHAtNYi/zy/YBFTq45Oltp0rlzt9GIdwPwKnTE
+QQwJhbkPCOB+ax0MI4YwRQXdgEq4rh3F1Zbo41go40xgY8KB+Sp4ZR1wKAMPuPIwI195+NCjDKEY2KKDAiqdlCoaedoRDxSsXw8W+tR1qINnNEpV0fLcJwM4
+JTCr0Ctu9dBFkfhQFl/wmYo1CUEmCxbE91UX6T4Y9wlQJXpXeOjw6L7dA3AycF0+v1Xu8sFFB524xo9ruHVf6bDqSH1dbQ9n4oeMaiuuPl61LmgoMiifiZQv
+uOl2kTuDVqYGLN/TXRYD/8JHacXIp+pRADBUYGgMV100osuvlrHK1s4QPNCjfGXJDz0Wdn1vChz1zLCRRR0nkYV75GCs9Hl8awqO2mLl1iry5SmDrxpY67DD
+Dh10o6faGRmRI3kJdO7hPUjLeuY86ISHgynfZMDAwaOdAIYBBKsoVSIZrqlGjOheuI8hjBMUpcjznU4CwSDC5VEEoorSy769KiwGLaooBy9GEa88PH6r4TPQ
+6iTGcMopz0gpzaJnpSMbcdDvAcUKD6w6loIfPPUpDH/oJgdl/LqPropX90FCp/uS33roqonolCbCUpD9eAxZPnnhZUnANS4hO2sW6qNHFPWaXWsfeX0tIm0N
+FlXR1dnBQptukTzllMG3fOf4Fa3zOChoQk/lB69krwy9oMVWILRLZAAH2XEWDklOymrR6YxzuU//YKHB/WLIZRbXPbCrXvGZ6Qq5Oyd3s23ga4WVc1F161eA
+QbeFbOtE6kTBLDvl60ttvOqMYWvZvDTSBAk+BQM6I3v2UZZVSutFt5zSfWXtwC/6W/2eRduh2GKVsbKckd4ERQ4Gd5M+fdcfjTkFJBUAA7jMbhQGRS6IAUUU
+hjBIABhWD3DnfinVuToVnvIURIjZqKKOyMeB3UMHhanHiU2ZLojtSLPnzAuDKU2te9UhDL4xY+xScYmGBpZwcGL0ogfeHF3DyPDmcA2fbgfjkLSY6EWP7o4A
+ousbf3I3zhQu4yiCLFFUqzFh4pT06ivPpS/iy+geYQC7DNJ1T8uWGNPV5Pzw4y+nt9/5MPXq2TlaoegCRn3y1B1Rh0FaK7FLpFMMcJ0X5ZUuK0fBb7OmMUER
+eBrFNhvyzw/wRSuo22Nig0pNkhgoO9e1RRcZwUcGtUXlyN7f/VmMM9rF/j3l6BwtWlswc4sX52bPOJQAhl56Iy8HvdO5eoUnzlGCjvvkRgbFqWI8GWXlcyY4
+TXnPmzcnf5LStSDkPj11Xqdzthuv6qJPupkfYx280znnZvie1WI/Agka6A+MEpzLliI0uGfBHA/4F8TQ7fOh6Cl6K/onczTUljzDD0fGa6ald59+oxVi2DIo
+iIBdMxjnHMq9Qnx5KR/CMMCoASVA0Y9iIOSpYJQoU56BEbl01WqXj4ARhhgH5uADk5AWL16WZnzg0YUB6aPoFr4xYUp+CSM64LB9hKApEg8hufgY8iex1UjX
+zTikTCqgFR5l0UlYxaDKY8XZKFc5TxkToY3BUQLajIU0495MRCHGDehgZG++PT0dfuj+8T2iE+NxgD7ptjseCAfwBtPumS7Gwzk5wyNPjku/OfW4dNTh34n6
+S9M119yS+q2/XsaDLokc4Kxf4StjgqIjfMMbpYpzhZPofljgDrsJvksXq1mzJrG7+pF4bVf3CFYd42uHD8Qm3t5hjJ3y9HXtPnFe7wW8/74xabvth8W6Vpv0
+wCMvBf2tY1+gKfYyY8vw4a700S06BRH5jNJ1Xb+hd2WKkZcF2hrQ5Bv7+ESoVpkTaCmyDgOB++Tl9QV0UBwKX6WHYOkBXPZENqyejsFnf3CTEcdQjsOjr8Kp
+vLvWa1C34sYn3GyCvZQF8rIGB6Zy9K8c2GzfeaOdRu62AgEVgILWPERPUVolFQgJQkgk1+rwZgaWgcU1Y9Ac1zKlfF03KE1sNXwOox4H5KyVhshKTz4zNv3k
++MPjZYvH5BYL09fH651+PfqCtOnQAbmLBI46DM6ugIcefCU+tLV5fun+008/n8a8ODEeFdguK0BZ/FA6BfjFA6FrhbRsNuOaISwGYKaoTK7YTqIbhU/RSuuo
+nunQKVOnx2dJzokvUWyXnx71IpGJEyelgw85KhtlMagmwV+8OCZ2L9x0w9X5cXldEHzffvud6ccn/jKesRqcFY4WrQi+TIbQhy4LA/00d6nCSCJ6Gn+ZyMh9
++DCm7EhB44rIbxX0PfjAY+mPF5+TDj7owBycpk+fHs8UXRyPh7wSj250z/Qw0sVBU48e3dNpp/4svxiS3t566+34ftJp0S3iRD6sRS5lX5pAhCc2oTv9xRda
+hrL2Qp9sBC2Mnu7JmT0wcHLX3Yvq+VGM7YaPTOPfmJLmzpoaj5TsH4P2udkZ6AoOXXi6MFMn5S58BEQtD1zuCdjsl5wcAhCcDjCMwdCBLrgFM05C945Fi8sY
+y1Q/3VabUJ8OwGTTcOEDTMHeAZbkvMk6nbuOdqKgyjyX51l8BChHxriHCNc8kEBEeIAZA+QG/PIrkYQBZiFCi7F6cA85x8AcmGY9GITIwmA+/nhO8tzMmWee
+Hu8x+yAdecQRObofcMB3otFZkh57/NnUr2+vzBSnf2/GByGEBemmG/+eX3iy88iR6bsHH5h69e6Sv5LQvVunTBsjEBjwqLVFP9oNxku0Ed3KtK9Ibdqb4LQs
+XnLhc50dO3bIgtOlfeixMemc+B7S1/fcIx5m+1286PDr+d3YRx55RLSig9Kvf3VqPNvUL2B/np579on4fOcD+WsRJ5xwQrxl9FupV69e6aAw9AUL5qX/3P5A
+6hXvlyuRMF7c2LVL7nvrUul+0AN5iZKc2oDX9Dn5ruldd3FfC23C4sEHXo5XYp2dfvCD4/ITuxdeeGHabbfd8nsZ7r773pDpR2HYYVQRILTe1137j/x2Jjzc
+fPNN8bamb8eTxNulS/78j9yC6R3UaEw+aEQL2fmlS91jBk22WnH6r60/mYPBZjigOjffdEP+muLxPzg6Xs81LF1w8ZVp/b49Qt7FNtgFGMY7z784Lo0f92I8
+ibss3k0+KfVct1tQUZ5PEnTAU1Yij0pf1nMEI8n9Yr/lKQO2zkl06fQuyE89vw7OB5Zz5SoMPMAnH4zqpNFzXt26cIQ6TakA5LW7xoEIhEMNHDgwA2GM8kRX
+ZQmuGqn6PLYQWyYGasQCQz7G1CnP1JR3kDOcZdGNOvmkn2QnGjBgQLxK9yfxlbnd09ixY/PrtSZNfCVeIH9dfo3WzfGAWs+e3dKD99+eX9Zx66235N9HH30k
+HRUGfeSh30oz4it46KFEjiFVYaEb34RDSIJK7XqWBb4yQyNfN0JwIAt09u/bLX/E7LHHHs0vfwSbc/zlL3+Jz5tsm86PN44uDV6eiDHRtddel59QPe644yLy
+r5+N78gjj4z3N7yXvvH1PdOH78fYJZyTUTJQ6xzkxZm1ZsYrzhk0nZj8+Cy6Q/LQgz+y/OKLFfElvOHpqKOOiKdgn4qv5u2YH6/XUqL9oovOjXHVkpDJzPTc
+M4+n6/55WR5j0psXf2699TbpiAhcvvJ++GH7xxuZxkfUL7NoZEVOElhaeDpmM2iuwZVNoIeOycRyA/qdP//cy/EBgMvzS1XOP//8eOnk/fH+u/3T1ZdfEC/r
+vCPKtg8dmEW1nNEiPiN6TzrmqAPi8fk3ws5ejXdLPBO09co8CGqMnW44cZ1QQRt+yEqwLC1HeeoATWijf3XYsAMcMtKd7Bw79dmmeupLeOJoYKpLFmA55DUa
+vsPOKxp6MYFQJmJ4nAoQA6IcYSijxVLGPeUYqcF53M5lKyNmrWwlgVA98AhcF2bSpMn5ZYP9+/eLSP5eTHe2TBPfnBLjje/EeOOEMIajciQVvW+77bb8Yo3z
+zjsvdz1ef31cGNiszLCWCh39+w/In1M89NBD88sGfTnBm0IZ9V57fyt3HQhdYhBoZATq4s2BJ/wTDoVymFIn+sQxA5QH1/FskY9zHbD/PvmrD5zjkEMOCSPc
+Ot6iOiX98Ic/jMfb78l4yEWLpJvy+uuv53Ivv/xypvuMM87ITj9w4OD8oNrOu+wWsoO3vKHWoJ68m0QrTemMiwzp4sNoVXS/rTOtWFGmbyn/kYfvj3dC3JCN
+0/Wrr76aX8j405/+NPnc5EEHHZTrMxB6owuOP378+Phk5yUZD8PXFXzxxTHpa1/bJb7wNypkV3aP0KNuGjnpYmkZyc8ak5bIOcfu0aNHhq+ngmYtuE+YXhbv
+Bj/qqCPjW1J75Rb82XiDKxz77/+d+HTp3umeu28PucW2q7bxTrl570bA1ELuFwFnenxz99YIXN/Ige7Agw4NPIKONaDSvaIzTkHmNSCZQbXrHl14pXcyro/7
+eFyFbtSpgYJdVAfiTK7xRF7w0Q85kEH1iaYQY1RBvwBSgIIOAlGBQHR3TEMqa3wAiKjBUDDB6KrnglGMaFlqG4QY4LnO8KN78v4HH4eCRuT3Fjz2+NNRb2Ee
+yI977fn40sJfsoH/4x//iJc8npcNkhH4rAncXhLZo0f3zBg8hNO1S9c0+vTRiVFLBC5ymAb1VfIqSDxK4DBIi4paGvfBko9fCf94k/IqeeCx1R+ItyaNjUfJ
+f51hTJgwIUdz5XQD0XPjjTdm/GD60PFmm22Wneu3v/1txqOs/j1jMFbxQWIOu3BhTJhEV5deyKzMyplNLVPt4Okmdw/+Q4zZkO19Wx7OZJtMfGkpbbXVlvld
+FZtvvvmqlzVqZX7961/n3oSgZVzTp0+fbFyXXnppfP7lPiRl2nw5nh7teCl5ZZ3Kfj+9h2qQy5eWLhU7IB8Gh3eGxpDJn82suWb5hvBmWwxPe+65Z25d8ChQ
+jho1Kj730i+6uvvFezOuzO/B8KKTt9+eHI41Ku0QXx8URH1v97LLLouyfXMP4vvHHZOOPvbENGRQ7wio5cMJZFPssPQc8qMTjaMFal82UZcJovLoCT2jMTrK
+OajTPztYHJM3aBbAOFYNGuCWgNAs75Esj6SvXiJqWjyOELxjwZaaMjUt0jAiDgSYZpyBOa/jIsTYWoJAA3FCRBxBV0IogvNohmkeQbfcckN8gfz8tPfe38h5
+3//+sfGV7IfiBR9X51fW9o3X2oqQP4kuHVySSO8FkYSsKwY+Jz788CPSgw8+mE6MD1lVJ9Jdui8+8NX1pq7RCoyL2rMy/QyQcBiJPWIU7yADvDinDJFHRCI4
+60Nmdlq3LtHHGMTTrhKnUU6Xqlu3btnxrrnmmtxd8U0ghkomjPT444/PLdK0adNyXX/IigLxMnnaxzHuWzc7CfzoMKPFGLX0Fhy1PKI+fCYann9+fPps0bK0
+9VYDwjHKTF///j1zK3/vvffGZ1QOzfDheeSRR9K///3vLE/GiK4bbrghupzXhgwPzwGzEkZfNdLKM91M723bNs7vi/PtIDDpffbssj+PnvBRHElAKOuNxp+S
+1wD4cnmnTh0joNwd7xo/Oudr/YwTPZHbu3fv5POb7KcGPDTvu+++uZsr8Dz66KO5N9StW5f04YxJ4Ui9cosCmLdTsVEyZcfkZ1LGJFIILtteeQzdpgKbVdvH
+DhKv+ipbheoEhplYs4jszD1l8YU/srENzjII3UjW/TwIkoWiAAWqEDhzJYBEDorDBEAcyhS2roZ7GHbfTI37xhciOSSYMong+f5/XXd16rne4HjjTb908HcP
+i2/xHBUOdXNEyEFZEF4K8qPjj4sPdV2cI7E+cf/+/fOHrwyWn4n3pT3wwAM5j1LRaabv1ltvzUbws5/9LDPlz5NPPplO/tnJ2TEeffSx+IJD/5WOUbpvnBmt
+nm/R5ZBcF94bZWflcORhVsszQ5yNsSoXSyg5aZXxyZlE9XPPPTdPIuirk1HDpGullSKTmtTTXSOvLh3LU50CGKdGF2fKkwohYwumxk2S+wvjsy1/uuSM3Ipf
+dvlV2XAtEYzYceusfF0zNF999dXpsMMOi0mYk/J4jPM3TL/4xS/yPkY0cB6yMeYtvJftQtF5WrkT2xYpzw/FwmvszF68uAzCXbMZtmGmEf+MWauq1deaPvzI
+K+lXpx2b677yyit5HIYOMhesN91003CiEVmW6P7BD36QW3JOBB77Y48mn9iYb0VJpadQ4HToUNbv6CTrKeRGhrqgElnLr8kaJb7xzJadC6b4wJND0sBYRFcX
+PGNJdm94I4Co17QaJUeiZEqyQEjJjImxQASoe4CJ6BByIIJSFgLX1QABh2xqvDN66y03TX++9KIccTT7ZV9Xiu7O5nlB9rTTTosW6vd5ULrFFptl4uEyu/Wd
+73wnxj37xKdCLsgTHfnmyj+MlSE0TN6x5tP1finghpvvSgMH9A6jspLfLDsfJWuBjXfwIgLiEc0ikIT2OTHI19Li2St3RWHjJt3bw484Kg/Ix417Pd5990Qe
++3Bgg1WJgVSlkQ05Mo6axyjOOuusmN7/fjbavn26ZwVyHMnYjazImUxFyACTeRgf36t98L5bcz0R9fLL/py+tute8aaiielf116R69OJMc8f//jH3LoPHjw4
+5/uDNgld8hve0zJ4bEG3kjzW67NxuFHpWqpHTsU5ypIIGHRVZUd+7itLtn7JLC3/qEyvBzABEF+6xMccc0yeDDFeNIzAK95NQuy0007xCuqXVgUltNEdRxof
+gbZzt4E5WOhp0I26DolO2azWCI30yUHZjHMTXpxHebaPTvavgXAukJABXuhM61vhus9Z20Q31z14YrJh5ArA3CQkBEFSjKZswlwNrGwjIgjEcBxEIBCxdbyF
+WYS88cakNHKn7fPnGkUykYiCHeBryocOHZqVYIzEKPfbb798H8P/+c9/8qA0cxB/0NgwYQIdcFMkhnRVvvvd72YGp0U3auhmI+Pbpv0zPfWZntoqWI8x80Xo
+BMgIqmBNwzdrFm/2iXUILZGy+s5W/XVNnn76yRiLbBGBoHPuw3vxZE3oRFfDJA+dUqX3lFNOydFNt+/iP12a/nTpP/JLFsm3OHt5GJGhkrEF57HjpqSzzjgl
+ukZHposvvjh3F40d3nrrrcyj8djtt98e3ea9cyD6zW9+kw2CrCpd8Luu9Lz44ovxsa9nsyHpEt511105+qOjV69h8RqwLUNfZUcC+ahbf7U2Fi0ldoJvumBo
+nCUH3GiZXhk7IT30wG2Z34r3gAMOSGYuR8ZyxVclk0V2vsAlCbg77LBDTE58LX33kMPT08+8EO9qsPBfnlsKsjIPNTiik17RxKbhJVfylNhulYt857UVxUMN
+Gnbre4tTvac++1aHHUtNuvdYdzQD8RqqihBABY1BFK4RBDESQpWt4xeGyakIjTApfWkIeMyYZ2Jcc12+FnkHDRqUWx3di169euUB8R133JH7wE888XgWGuei
+/MOiO2KGqTJKCF8+0CBPmXpvo402yq8E1tpsu+12+WPDb0x8KyJR2XVRadRa6Ue3bFkeuKuCBlMZXVJ7qFa1VCEPfeWZM2elDQPHz352UnZ0vBhj1GCDjoYJ
+bRK4UuWH04+Irsy2226bW7MNN9wgnXXuX/JaCoMXdeu4R+vFYMF+I9ZTRG9T28YXe+yxR3z79e/ZcQQCs4e6SiZpwNBl2mqrrfKUe6WlOhSaR48enfkQ/W21
+wg9H9LvLLrvEWtOcMNiXwgF6ZPrVqYYNTpGnKeEyGCdHePDHhsj48adeTeeefVpuefQ8evfuk+644/Ystz59+mS4YNVU6dRbqjpmg9bpTNawu5tuvDk+ED07
+d3kFYbLS4+A4cKOxypxTS37J0L0arNBYg7BfMmTfunc1v120TGiRKj1otBGAf+SGZPjwnVYYOFXh8jqFeKsBrnyKBKAaIYKUcw0p4ZqD553u6br5Up7HBv59
+643J7BuYFKXezjvvnIny50c/+lHuv9cukckEfVADv+q46jqqYGrlyqh7BIQOTj0tWiIOyQkeeODBMLZRMXbYJUcuMHXzjDe0ehLDq44kIGgxCcjA1IzTvOg+
+6XLp3t19173pnN+fkU6JMdj++++fDVGA+P9LVb61XKWV0U6cODE+EHBKOvjgQ9Nr4+Ll9z26ZV4Yp3L4o2BG+cD9d+dpYDxdfvnlWea77rprzCBulE499dQ8
+rqiTLnCR54YbbhhT2S/myY+K369pb1H/zDPPbJidP1LMwBmT5YOhQ0ekb+y1U+Av0/CMtU4I6B6xA/xxXPSK4PUjXHSGj7vv+ncE1hdjAunEvEjM2KUqFzxK
+DQNRw3vydffOOOPMeH/7k/mD07vvvlvQtW/YahmnNmwltBroYY/st9ILD3rlV7o5Ar3DgV42jT68VLsAr9qfuuyMTvgJu29sQ6QCWiUVFWKAksoiN8NCgF95
+1TvVgdB17a65hmT+pwvy6r5zcMeNG5cZ071DqASm6EJgNemiaL0wQACUVhnkOIxeNJHgAqMKHy6pV7R21p4mTZqUBg8pYwODVc7BOAwei0OVxUT14SF4ONwD
+m5C0QnAQtuvoEOTntZyDpevhfd933nlnxu2Pvr+ZOq2tLhOZViUwXEeldYMNNsgLzXBsPHTj/HZXNBRFlxevkA9lzZw5K22z7Yh09jkXZplts802efKFE5kg
+8FWJw6Ilr8nkBp7Nzt19991Zl+gylrOWpetcJ2ngkwzmBTJdZF08C7Mjdtwi5FL2VlY5oJGdMED6leiVfXiGyz37He9/6KW037f2zkb3m9+MzpMfZKtO1R3+
+6MAhr9oDmTmv1wLGoEED87vFhw/fPg0cvEnoxZPG5U1EVaZg02e1W/nOBXEOVWgr9u4evLXVV1c5B7zFXspTCspq+SpcjkgOZNBY35cArAEROkGIKAA6VFaQ
+swDulzDdUxcgCA0Qq3AxN/Wd97PwEAmx9QORV1eDoUruqUMQNRGA1k0khcu2FfAZqk2h//rXv2J1/qI8fWomDwyGqTvofdY1DRs2LC/itQkYPWK2MPMWxqil
+M4tTjZswtEz9+vXLfMIlspmM8Gy+yMrZe8TalS3/bTv1zY9GULLujwCh62TdymBY6/vPf/4zy4jBCAym8QUAcjagdkydOjWTqptRFWsj54KFZXc8OqphwgWW
+X4PeV157M97b/eOQW4qJgiGZF46NHnxKHJgjCyZ6Alp+snOfjK+44orUKwIO3dKBfEbzpz/9KeuSc7755ps5v2fP7und6e9lZxY4yJydFDmVBwfRK8+9zz8v
+j3m4nz5/Pxu/lksZO1WkasDK44sM6KReKytV3uu1aXOtOIcYtdvINO6Nt7NM0K8sGsCiU7DYlpaDzYHPbum/3hOg6JkcjLXYQ4WFfsMbcHuEbMHS+tEZ+vEk
+QIPbVCU3skOsNGpTfTzUmgXADBpxiMAQIA7XGOJY1SAoXEu1eEmUXekgHMm4wjafLycMNUy6ZBZetVwSHGeffXY2hoMPPjjPQFkVJygzPpnuoEnf2YFuDo/e
+gr5R+mR+2Qtmvp9w8INeAsI/XjgYPpwLCvh66eW30tzZbwUVPeKLFL1iMbBnmjdzRsapvlkdj8DfdNNN2dkeirUw/P/qV7/K4z5KN2YZMmRIMoX/y1/+MrcA
+psnRJ3EwvKjnUeouHa3Ml3cfMAD8oYmjOJd2HL5pzErOSKee9tvo6t0eL7e/NWY298rOAx7HIXMTNwIEw+NEtgtNi26vxW3jH0EN3wIF3k1akJ1WySGg0Y/d
+AR2DRu9V54RkU42WcbEJB2PEB5tZvnzOKh7JXD3dd/rBP8MmD3i0nPhjkA6zrrrLYPqyBR3VyRw0mXZmF11jLenjD95OjeMTNp7zIid04cGvJ73pSVDGI7qq
+zaIH7a7VE7ToEy76b5jUsyZVeyfuqUfG6Oa8sc+xNLNu5jejBnDAHJhFCGFCRrHKV4KVyR4dZXgmQTIQTPRerwuQOanjTThSNYZ8EX8I68vOJJoahDMKK9+E
+qxtD6YfHwF6U171jnJxu/fXXz4ZiYZYQMfnkk09lIyoBIQa+sbsXfZyIYBy2GFm8VYZQ8YV+PE97Z0Z8veIX0U2bGNts7o4tN/vEZ1XeCIoXZ3zK2Qa0RXzB
+nMGilwLMKnFoA2NOdvLJJ2dHYtCuDbbxZY1Muueee1cFmClTpobB2n7DyT3qEe+5iOl3yxFkZOxpO4t7nmPyDu3dR+0dC6135GdvLBFYryIb3SAzd7p7ZITO
+p+LbULYxCVKWB8hSyySRm8Bkit65nRA77zwyO8DkydNCJuWhRIbPeOi+7pEkS3U4Ix2RZwlwxfF9+pTc8aDbe84552SafJwazZwGfVp1ux3Ib8yYMbkraoZu
+REzKWFeU4GZfDm84ionurC+zajkvbI1NsQHOgl6/1eAtFJMPu1VOoku2jT48KK91rj0CeYKJbUWcja3jT4AwHMBbU0YKAEG4yaAAIhwGpW/s7Z/tw6OthRAU
+xLxQOeVFVIQ7RCsP3OkfE6iESHhqUgdOv5ghgJr05XWFECzpy9vqAq6x07SIqNaO7GYW7SmUo5pKrcli5MMPP5T7+r6qHWjyk5CzZ7yfnQWdcBIOHsFwHSTF
++RoxGzQ3XX/dP2KP2kZZoW3atI5NtCfGdpXt05ZbbhFOsH5uGTlNr+geSWhAmxaV84PLcUR9AtfFki+Zbat844uSJTL5POTtVwSt0/XKRlbA9DhA0Q+e8hOy
+bdZO1//r3/HpzyNzK3PN1ddkPZlSJ3/dadtrJDolT84F77HHHpvpdM2A0TV58uR4Ruqa3F22WdaakhnQTz8tH0Kga10auluxonRryE8QscWmTRgb+YrW68Qu
+BsnDd+yLcZIvOeHxyiuvzAHHrhCBxeSJoGTGUcA5/fTTcxmblbWy0vjx4/PeQfqa9s67qWuP7vmNvmvExBiYjJ4uNAJsFS1SmaUtG2urPXIUwVN59mycqbxr
+sjJzK2j9f63dCRzfc/048M8uzTGMOWabNnPfx4hcQ46fqBwpV78lqfyIXyLSsX45chP1JzkKueZIxJgZRY5KjhLDxgxDOSbn2P/1fH322r6tMdu8H4/P9/h8
+3sfr/brfr/fxUS+/CT3xPt50jwDJbyd2ChJtY3kF5LoQERGMdTAEgkHMW2/FvEs0DhAAyEtAAABwgghRuc03mMeOSoRnCYpZEKASYKT69pu2NBFbyWShKJQx
+EkRbImK222XQrCN1WIu6Idhg2ZgMU+hL/76LJaLAjKm1J28hG4EhzkuoLay85JLLUoiGxcCd9bDw0xjj8MMPjzofz7GIezWR2fZ7vgxwiJyB3/ow44m11lor
+Vz3Ii2FaoWjb18fttts2tbKtC8sNGtj8+d6/Jny27YOPe90jLNIb0U9jj549rXSOxbOhUS3hendqLM/q3SeE44F4E+GmeQ62CWICDTcDQ9CturBgloWSjB8l
+81AjRoxIC2uFCGaCSxO5lJQoGUux5pqr5T4mdMdIBIeC1ZdSqvAHr/7jGWv5HIwiLRtnPMDx+BCg5ZYbnFqcEmYV4IfVtoSJkJ955pnp/lbkFg3gF3OjGR4w
+ka3sqFFjmpVXXC7ut0EvtNc2OPAlehNgQu4ZGC2i1k98rL6k+7RybZ42WotPKNopU2acJ4iPlYFXSSyB0dDnaMvaJLsb6+jXVvIApEEVAkZmwkBKNaBj8kAq
+oAAhj7Ay4JwoOvqW30eZ9q0V/FSTr9qrJD+BrXsApFEIqASBJhi5K1Y3CDdzDcx5IDYX0OBY0kEwlKYXADD432CDjwW8xiOt0GoDg+qL/GmWAybl3w7CLzto
+jah/aK5WIETcN2U2i8WTNLYJREn/4UcqwRwaLoi+0KgYWHmCxe0Df+WV3yUhTll2ms480ZJ9tk7c2joeYIV/LnLZLiju2dPKkNilG0oOM8P15NgVbD2g5N1D
+rIaARqX99tsvxz8idDX2wLTGTBSSJLjQmbgvaAPORReNQFPgzBhBgKneKYyZ0Ugf4AOeWCv4deAkBg8bH+H8AemacTe5QvLpg7JwZDEtq2MMxSJRzOCzil5C
+J/QBi/Gd59x5i2rhGw7woHoJUCdd9aG17u38En6tMvCOh/GClf14QRvVFzBql2UDq8tEvfzaoli0q57uMqvYhTEAoTJMjiB+KwQ5ChG6119/OTvozC+n3gBA
+I5JGXosJsqWXXip2QY7OxaZmrnfZZdf0y2mHFqDuGVUSgCizDQZtArwzIbhojygdYTSuETZmqXS6hEcZ8EruCVWGiDXjJtiuvmIimaAWsvVJ/8FcLkG/vn0S
+QcZhe+/9hawLXAQKQWhzxGRdaMcSYBn9tljV6mkDYv2k2SG8nhd88kqYgoC6/8wzzzY95g8XJroALvcwoykKXkP0Ku7pWyiN6Detry8rrfLRZsKTE7K+wcsN
+zi0QLCC6wQ9m4RZzTYwZ3DM9QFgkcGoL7dFdfgKB6eFx0qTns51FF+kVfVkgztCIYEws+yFU2rewV1v4B07hPNemxa81YtOeyJd2Mb9UOGCJDjrooHR5uaDG
+wWPGjMkJZwoSDoq2YAS3oINlQ0NDadkWf+99f21WSqs0I2yOTmgGx+ChmEvpo7NzHfCxe/rreWtlW1e/8EBR6Jc8YMYnknIusBFiqbsjj+oFUswvX7IsEKTp
+AGJ2VsYFUqmOLhoWSkgQ87NW3BlaQIhxmf4rNmeedXa8Vv7UiMxcku4GRBEcgEOccQZhss1AMh7imrAohXAAY1zEnzkVout+leFL77zzTgFLDBRfeyn7oH+I
+igEtYISksqDKQ/CLL7VWhnUw1pIwi//mi4x51EODUyzcR4GEggO+IJklreQZuAo29+u3iBpFQ6td9evrmlVX6h+4NtZsw++WKAmUaIsFQmRr1wy0tSUtsvCC
+zWVXXBeW8EfBYJvnnBHtj5mkElpum6tSwYVBK4FdMvck+GDFi82Tm262ZYhHq6BMCVhviEn14x8hdHhGO3CKtq8ED4D1bw8/Fbwg0jsDXmW0jVdMFwgSDRzY
+rnSBZ213jiMToGkfxcD+ep2LoIukHCGhtIxZ8KcLjvCj9sBGecCvE7EIgbE8Vxkvgx//K6Nv6C75DUf6IxWO0IxcSDEh257w8uKL/5zuygGGuZ6B6FbzaUgj
+jtuVEOq119q3Pug4KVdG5YBbd53VmjNOP6u5++57AlEfTW3NohhYQhx3yViD8AhxA4z7ZpafWwZw9dW332Bw1e/qHHjck2heIVX7XNpoT7scHoFffTUGlXF4
++5133xeb854JmDFO+0aHAQOWbZ4Y99dEptUAwtraKm1lvGCiEjEQiQIAqwQOeSG84OuEsQRHXvflJ4RWtFvm88gjY5tHH3kgX4VJyNNtCPdN8rtPn8WD2CYA
+23kuG/rAoK5evRZq/vHcuLT4W225Vbq7NafmubaLef2vCwwzw6U9qyAefvjh1P7myaR+/fon7VkqlgdM4NFfFgCOXOrjoXguVP3OGxNz/87AgYNyr5G6tOsC
+B2tgOuPLX/5yjkNZ8FkJUcH56KOPhgs4YJoS/Fd4B2tHO30TNnUJiJmPq347KAVfYX7wgE3bhEQ/8CkBLGHTH0Il4W95i6bwrS6X3wTTc33t+npUplKamaYl
+pRolDLSgZ9UoVwmT6JRKhBJ9a0hlnnmFiP/cAnVtGOHWm0ffEmB1ySUqfF9J/TprsKldv9UhaidZPCkVAgHs8t9VvzNTfGjbPcmErVXPhRD3Lrvy5niP6rMh
+BL1i/86Q2IW7W7PVFpuGNn0l3IOxAUO8zS/6I91++x9S2L2zp+BVv2T9n60J6mY5uUz1rGAt+GaGUfmCU3+FmhETwRwh1X3BZXOALqiDYeGfBcWgxqVtWLyd
+gPYqSsn4r93Q10b1bDyUaFapYPIbPJ2Xe5UKLvSkMMzlyNu62XYAtKeYsjzcb98UJ4VbPFN1gx1zermy5DAVrvi4cY/nJK97+i+/dv323fnbs0oFm3zGxrbD
+m4S/509/S0EsPvUN3gp5+29zXylCAsBdgxMGAe59F5/4Xe36je/V4R6eVh6PeoZ/XWRFfV3WG7LhVA0BEhIJgAcIQcJdInbuISbfmCuoc8zppEnPpYb2W3xe
+4xpr87cDsmcnvdD87tYbQ8v9fdpY6YnEkTalAp4rZaJQKNsAVFLPB0ngkVdnhw0bli6jMZSEqDQIhLkgRdK+BY9cif32OzTmXjaOgb0Jum6x5fnqsKR3Z4hY
+eFjd8vt2CRETKBOGkvZdlapP9b/zWz5EoEBOPfXUZlBE2ISf99//wFhr97cMi6TrlvC126TVp12Eg2N0QVDW1DkNq6++SmxV+XFG2lg58zMFj3KzSp7P6tn4
+aWF848TFFlu82WrrHfJglmIeZ9GZAEVnPAO3hAmeUylM+z1gQCyAvfqK5rqYK9s+1sUNHz48x7ngm5ME7/rPyzDpbeL4rrvuDvf/Y7HNZu/E4z9j7I2P9Qle
+4KfGOGVZ4A3t9ZkiIHS+8YyLNZMEQPTHpW/6rX11VxvyKVP3uq244srDiyAa14gGVKKwTpBcALTLZtqDBUlq+ZbKq1CjgAV43ePPP/b4xGbgskvn3I+om1Cs
+QaO21C/5JozKG9AbmBYCM8NsPtSlTR1l5cwzcfEQFyKYbstyTAJaOkN4zPaL/HAnl44Q+WmnnZwrMG64/jfBmGuE67ZdhtgpEjDpo6QtzO9ev2lLR9zrvOT1
+f+ZU9+GT7w1WuBDdczbDccceHcuRls032AkoYNaiCyWgTsKkry7PR998d2zT/lkwfe+0pAbxFZ1Dw86k/YJBXfVbHnSWnzLlbgub77nnHoG7l5tfXHh1s8pK
+gzKI8ny8/wgDwmtrmWYcCYwZ1TF/WNgH/jo2FOdOcVrQwbm6wLyVXc5ctw+awKc+NGAlWUOTtIMHLxc0WC5PmnrymVeb/su0x2Mbz+MBZcr6+I2X4K9w5ltd
+eBUvw4VyosxB4enKlhzAcVke9XAP8SlrjIbq6hKrovNlzDpGgBBYZg81DBiA1OALY5aQ8deNF+TVYYjVYBEaYTABX/p3f3ig+fNdN8bAfHCEZgdExO2rub9E
+u9rrRJjyynUSWb7ZJfWAlWWj9TCTsYIoH2tqYlVYWCCAX83yiMDdeuuteZaBgfjQoVtnn++66/asB3PqO+uEuRJpgXTtSNWm+yJ7hHfLCM9bPT0z/JX38ccf
+T1xrWzI+MPfk1KLlYpv9xpsMjbvhhkXI21kMCIm4LJjxEPzzGPr379fcd//DMRm7d0wYH5KaWqgbs9oQWe1F4YTFf/SVSuFVP+DMNIN1guqneOBq/PhxMQn6
+UAj7Js1ndvpsKk8TtOrCULSyqRP5JczoEE2n4zok89JLL0x+wEtHxSpze4qkmXGTN9/nA34xfAmjsS/6Pv74uPAOLshDaBTf4GObhoC0S57eeCM23wXNWxhn
+DAvk0z4hKqulfjyHh93Hg5KgWR3UX4qxcAaXfut7dwUhpZL/HqpIY779JxR+139IJFTuA0Lj8vsvQSiCsFr8+FVW6Nd89WtfD815Ti4m5crQ6CYCMV0lCFdO
+KsDr2ey+q4MEpeZ7RAbfL4kYjYlIovkmjMJVcXD7lltt0wz/wQ9j9+mZGcIHYxFPfYIhNVbw35isfHPLW0wswodUmr/gM84aHm5OJaF97sTaa6/TLLzYwLTM
+5pCUc/QwQWbtu8bRxHBDKQiHOx1nwjMvNsPibAbKghAJ5tDeUuEPfRDdhamM+wibvhpzuG9LheCP+sFN21oRMWHCU6F42jkp8HuGpvjAm/nkX3BBVxvBozQf
+ePDvee7gBRecm0qg5tDwiKSOgi1vzOZDfjDiOdFSwQn8wzJZyDx8+PdSMaPlhRddksd6rbTKWuEFDcgIHv4zZaAefQC79gkAfKAj/JIDFx7WnnwSuD3X14Ib
+LISQcDMm2TMIxcAaMc8DKbQgraWA537XPb8JkcaEEpVDrGqkAFBOgML5AgsttEAifocdd2kuveSCnIMRUBg8eHBG7GgazOBsAa4XQZxThBc9OsuBC3yS+xDl
+u2Dk4pmUZI2vvuryZtE+KzQvvTA2uDDmc6aODDh6xNjj9Jy7MDdjDkP4fuDAgTkHxLUB91lnnZWRLtsURPMEEkT+OtuFL+veCI7wdCUWyvwTN3OroRs0V115
+WT2K7zhkPuaPYpPC9HsrrLROnn1x/ai7m1NPOCJcpT65Pk0GgkRQK+lrS6e3MxrHUgsGoLF2jfGsIhA1RVOp8MfVxiiLL96OHQjQ88/HOrVgPlbHyoXnnv9n
+83gc2bz9J7cIXukVFvKhZq1YCXHeuT/L/gwcOCgW8X6nueiii5rxMfaSik/yzwf4qPzgssKE5bAuzxgVf1q7xz3fa689c3xtvvGkk09rfnP9H5pPbL62jcXJ
+u/qNF+zr6tZtxsvqCImkr+ipPbxLgHzDAxxK8rpPCNVFiMAVbz1prYwHbihE0jBgFVQZSyUPYSqhI+UaxfS0oAYy5BllaVhzVFLN2Sy91BJBhDebldYc2oy4
+6PRYsvLpJB6tSPtZ/oPQ6sPwJQBZyQf8KCbA3MKyGKbugbXqpAwserVVgGbjKo0ceWP2T/62jHHD67m5jRsmFMzSmU9yuk0NTo29bG+Xhg4dmgEEYVxEN1OP
+OAhhoailQFZ5SGUpjJFYCHNnn/nMjrH+bPtmlZXb8wiMmwKYwHNrDWjFe+75Y/PLCy9p3pz8dJ7yqq8G4Cw8+PS56taOVQOioRiJ+0nQjRXBx7ISaszXWQ7+
+uan4oV+/ZdIysYItzdXaJv267/4Hmv/9xreaB+//Yx5sc9aZP0k8itQddNDXp3sHpViq7Jx+o1+5/PiQ8jW3aCkT/LtHgZlOuGLERrEB8/jmO0d+u4njFJJP
+8QNlMTksObiLl9Eaj0utpbGEyCGgMxa/4m04hQ9WrCLN8uOpLltsuc1UlcsIUJlVjJn9V0iFGE/CFPJq2D1nFjCFmS/K1jFFKi9G85ugKUOT6cSoUddH57+Y
+g0XzAgiLCUTIPG8Z+T8H6wnE+3yUANp+LahRS4j0CwzGAgSBG8T62cekT7NLYNdvCsJYSvjVJLFkA5x6K0pI6bBMNKbxkhA5HHOXRJ1MwIJH0lfJxLS1b1xI
+9zDMeyXhZAtnDznksObEE4/LoAAhAZNoJ7jUrx5jREJPwYHJvBfFAReCEvDNglmOA+cu9GKhMZ55G17D7NL48U80N4Zr5XRbvEAoKUiMjSYiuvgBT81Lgtvi
+1aKJ+vRXIIvFt3qc+0qxHH74kc2vLr2q6buUl17He3ljXsnYvlw1tMTHLvdKUMvyOMjUCg4CU+1Sula+L7pouyY1cVYROlaGaQOc3wqpDBL8VhHEkmL35IMU
+BCFIObYKwinr8kwez9XbCmac5RZIti7sU5/aOQa25zW77LpHEg/yuVjzIkQQWoRiEbguBMYgmsWzxX14jE30y5IUa+IIkf5Ahu+Zr2J4OKClWARuBJNeCeMp
+B5c0PotuG4UQuUldC3H59Nw/QiSvMtVX9YjaGS8SNvibGQ7/4VPKU3nie6OPb5gMZKcrqwKHykrq1k/CySKByUJaAZNzzz03J4Pds1Kj1rSVEClH82JKygFt
+JTDIM/Plvgn3/b68b7pvhEi/CZHEHSbgRRvl5zRpQ7JipU6OQhP0gRf9ZQGtGH/44YcziESx7LvvPs2E8Q9lf7TbMxb9ojnFgpZo9uqrkxPv5U3hV0m/ubMC
+FhK8WNKlHzwENCRo3P+ugClGEJZESExPgwKyxhLuAVZ+gPhtQg7hlHG/XAMNkWz3CB+A5G+XZbRHxwqhbv/JT4dQvRbCFm+zC2EsJpgbRGdP40PbkK4uS1xY
+H/NSxgDcRsyKcYxTtCOv/ivnu/NyD9zSxRdfktZEnwhoEdYzIXB1cpWUgQ99hgMCwi3k0xPqSvKxBvAsWe0hmIEpOmGY+be8ZUEHxthGgENfVw5XEAMRjkpg
+YC0JA0ZmnVjgQRG6BxNLw2pINLK2wEDo4abW442JYIzkHrg7L/eVkygRdXITubSlhHgb1h9K8Kb8nKYqY5Pk+Bhr2RTIChdPVt36DJfG2SKQAiWfiCOXjdNz
++DKNNwgQwdBv43xLhcqzwrNWibM8+N9zdHohhKiU2cTYksO9Wyh2TXMVu8qosrIqMgIacXxDBmAhQGUk0iw3wXL5T4JpaD60e8qyXPJbOQFg9auD8NEEIiUY
+7eGHJ4UgvZzLPixNkRBmXoQpK4kPDGPcYR2fQIE29QkcLqmYIP90fHhexBsx4sqYw9g93Lp2u4g6C1ZF1CsUa+en+S/P4FW/LSkS0DAHos4SFJvszCFVG/CF
+Di5p1v1vGVDezTbfOpWUzYkUII/BNgnMXAmeBTHMqVkpr00WCoOwFoRK0ia6YUyMOmzYsOlwEX7uqPReuPKM9TI2YQVsHiwh8syY0HpEadb9ykfv+1G8CK9c
+N0cQEE77lrjwFAj4PJcIDYGAg6Gbb5KvkTGEsJigpUP7DR7CZ2EB4SrFKVoKp3BNeeLv5YO21ge6Z9zIA0Bj1rs77QkIN1gUAJA0ANB8nYKmQv/5mACALELj
+t44yd2+8McMsIpB61SWfRKC0A/gpAWzz7sQc1GJ2LooBPRNddWahufzQzmOPPTYdhhKO92MITdl/5Uhj+c8557ywJPs2q62xfjKb58qDvxI/2wp1cySelavE
+pREF5QpKnrloaAEGsJV1gSc4xNDvleJxJnmWX76dpzL4RxcJE2AGCW3QiiCZA0NsS38kAQ/CJOmj+mrsR+hrDKc+4xrPJPDJrw9S/a4gijopFXRX1uWZqFqF
+wLPgXH5Uu6KMvAwbQAmpEDhlzspSnnCJFhSbocUXvzgswuKXB9ytRwVOeFtwwZZ38T9lpH/oZVxIYCh6xgB+9XXy5FfS1QO+MngUX8N5V4wNkaURMZ9CtkgU
+4BCDKASCFPrvmXsawQyEz4BSWURUp/y15EgZ2tgzAMn/cnRSQmSdsxyltlTo1LwmSDGnBA4JzLOrF3yEiG989NHHhhB9Jcdzf33gnrA6KyeibTIkCJI+wQPt
+6Fv0jKWRuHyYicDAG+ttnERjixYKLCgvGUdhdPicnRLRhz59FkvtD1/3R9QM8c0F2T7embhn1h6ylARKhEsCk0Qh2CBJGRhXYiAJDBIcVtDIfzhE485E0AgK
+IZLQXr+5rlzaLSPgUmsWZ4f/znpn/l1l0RPPYHRzSuaP0ESwyp6mmvS1/4pSWGaZvrFZ8KCI5F0c/VwiriXT5VOPOvEo/nOhv37ACwHx3G+XCK4kYCGvzZUE
+teWZ6HBZHsRGWEi0atY3SYc4yFEZRCpYAiO/uSJmUMPyKNcyWEwahkSXdsIkJNi2CzsLWbCFFhmQwsP1OPDAA7IT6i6kzYzMOfnP36+I08zEn1U9+gVWQn7E
+t49sjjr+Z83ue+yZb4SQn1BiWC4UIuqn/JJghvYwZSWaUxluDQ2KSZVXP8bWXmk7LhfXSFLvrBKc1LNBgwY2v7n2uhw077zzTlmfMRqmlwedfHM5CTRG5qax
+kubvWEkwc1Ptq8J0olymIGh0cEkYVJAGw5x99jl5r4TJt2SSlBIxHiQ8LK5BP4VinCLc3imMWWgeP+z30iZcnnfe+c0jsS7QHJqxGWUi3M8a4jd0+WRu8+/S
+jLj8t/nqmN9cc2Uspn08xrPtShEGBc+iiXrxrf/43pkTcMl788wcFIPjjJN4EP9jqznpIwBcOszBZBWBIU8BQgLgQm6UTqsUdWTFynbv3r6pG340OjVmwV57
+rT0jDzBSmUPAWO388suTm6232jA6O3+ug+PSfZiJlfAyK8tvKAtwvZeAlhBNCu165JHfb24Zc0ezzRbr5bgIskMXh2VrT50lHJL6iqnUj2kszWFZDHgN7lkM
+BO3fv38+R1j4bBVQG+ZXx9ChQ9PFzYrf56P6INDwta9+pdkn3BZCYDVFheMLruqTMLfoHMuB2SUMPz4G7WghUIGG4CI4ZcEtWrWolmvWRkD3jZJTY0nTvtP7
+DZ/oSxjNhbF+eAbuKWGKhhIxPpPeC//5cDYflCFcsaZwCscjI4C0zz6tFd7vK/s35rBYfHnRiZttlb45p8mTX8lxoLHuXyKSedNNo+PE1ovifLy102o5GqEM
+gjER2NFpoQXbsDlD496C8X/KlPbcEv+7GQoQFoIkuVlE8LsVgHYwpgOQ0F7t/ncIcz/4KRMmUY50c438RijAyMccq1M+QYc6EyKqTcDk/TBSEQuiaUtMI72X
+VXIfrOPiVNE99/zvmBOa2Kw/ZM0w3+0Z3PrSNEuG8mgPROSidSZ9UwcLxD3ip2NcLoZV2NwmETXjv4Kh8Fn/wSkkP7uEPlKF388+++f539wQ91mqPPqkfi4k
+AbKJTpKPdjVBSogkS5zkcdZFlUffhSIfgdT3DTYcGlbtu/EWjJ9M5xN5Xepj8RxoIhJogpRixvRC7mgvFW3yz1x+UOi8Gn2rLSS777F387OzfponTHElzzzz
+rKy99rxRBKYBTDILOpjvuuTiC2I/3H3NCssPipc9TIy30bdvTS+XD40ID74kPPDp2TvvtAsUDIfQgTHq6oEO66CCJNJ/yPPMAK01e+2bxBO5oX0lMXT/dcgy
+dmMeZWlndREYCZLtngUIf7RlHq+AWbQZH2+rQNhllumXVikLzOOHvrRtNOm6QOKsEgaQD6wPPvjXZvDKH4txx7PR7x4xnnkh+8ZVZbqb5tnUypjP4Rt8ZOWq
+nWKQcePG5fhp4MBwvUK7SywTF8tVB5BU/vqGY7j8oAmd5luwf1iAkxIWruINN7TjH3guYaj6MbkwuFUQ5SFoi3ATYv0YPnx4Nl9lWbitw60z2azM3XeOiZD4
+0Hj9zgGxKuSU/xCmwoVK4E3CR3jiw0jwLVFY66yzdk7ADgihkOzm7jb/siEsD6Qi6x4rQsaHxTX2NC797ne/m3QwKc31c0aiuS6LqC+95KLmE1tt0oyMVfQs
+Db7lfRAgQgvXhAZeWqXSBlPkA5Ooc84jYXJRDAzN/VIBd4ZGpokQxpsZ+IYiduZ9aIQpU96Jb0f99o6XSA2KQzv6xPN2zKQj3AX+NqSqm2XyTYrVu1SMGe6+
+63fR4SdydbEojMEt4IqYiaV5+Hivuqp+zzH6Gmus3qy31vIxnuibg0qIBDt8YJBVVx/SXHX1rxOptG6tmChGrfoQSQRyn332ybCssYgJYRbJs1IuylUZ3TO2
+ImizS9Ue4m728TWbXosNziVDm222aczf3J6nAnXWIT/iw7mxEReH/4/JCI5xkgCJeSZ0B1MnzpRR7oocAAArvklEQVRFf1Mekn1Iu31uz1gVcki4zSdn3fLD
+ke9KykhC0AIr6C519jlvzOFHCasIHQFfNN5v22epFXKO553X/9FMem5SwqFfPAN9FSq3wp4LTKCMnyhDJxdR+jyyE084Lk4k6hf0bjfvkQeuKfyZFtCuvATK
+ZRcufOFtctC1zTQ1fUSFypSZPJXRPRlZJ89K0xAEzFbWiwA4jRJQAMCA5pAQvEV0K82Yk6/qea4ja5aKybPfBAP3T2KafZfmBeHKglsyIcu16UyQ4rmLFhci
+3mbbHVIhlF9Pw7GkNLHx32K9eyWy+dciRZb/6Is6OpmIMJhLsarBQk2EY5m4mfK5MKfvcnm5dDQld1Aq2Dthnvk35aefhP/n55yfNBK9MtiG2846iqmtfnC2
+nvGL5VFgNQfjf8GjTjRW3oDexGb//v3Ckk0MEBZOoTKWsFL+sMO+GRPdx2U/tKGOStW+sSG3zhhOmhe6Vt2+Z7QX5yxMag8MtbB3nQg4sO4UgyAL5jd8QQPC
+TLmbpKYIudwmdo1rKZofn3p8c8/dtwe/thPxlADe9YyBADvawa/VOerVZ7B02XzoVlNFzwyKCYqZckKjUM+e7Wn7Ag2ExrjGoRdLLBH74qcJVqAmgdOABHCn
+g77xpiXmr+d/9eZ529P8WgwEuNZcvtuMuun6tETmBL70pX1j8vCOrAsMRZAiQP3PDB0f9dy3zjHJxkYiRxBbptnz0pyXXz4iX/TrRc0UweSYoSY8EKOMOigK
+i26vGHFJ84vY9/KFvffKVk1o2s1brloxUdUtk3v1v/Cj7s5E8SAYi0SgO8t05qvf4IcDxPzyl78ah3e+2Nx6x4PN5bEIeIcdPplzKYIeJlYJRLVXcKiH8tRP
+ilKSr37njfgAL4Vhzsm474ADDw7Ld2/whBeBtRs/F4vV6jfe+NsYW30nJne/m0qnE/76TZnZi+SgF3BUH6qtOfmustxUK1eMT4S8d/zUTs1vrrkq+PfZcFcf
+yDEf+jkKDW9z5QSzRHEFg4TlKTeK0sS0cZzx4e57fCECGU8FfzslqN2sivfxPdh5FBSo3wSzR4/WS+tK83pYx7giEsZvEd8SDdLlw2y5rmhahdw3HVNeeJNw
+IIhwYR3VRGgwIxNMi0rKuBALsqUxt96Wqw8WWmjB6TPpYPHc5Xfn/6rDM7/rObghkNknRKJFhKjyeS7/z39+bgrRNttsn7PaloGUEEGUPmFCixMnTHi6Oexb
+RzR7xNvTK4mEmTuh3dSt3qqbUBiX+V9JXS44pJ3Hh2tlTocQWf1AiMDVWabKdn7rp0SRsRRedym6eNVV1+R9S6EwGEWAFgWXh5buaN841TPtSfVbJAxcQuCE
+iPtEiO644w/NT844Lcp5o8eMOZy3AmfexH7MMUcFM/5gOt+U0ihYBTSMKblZEnxVAoP/ddX9+q77vvGSOgUM7PdiUR9+ZOy0rMEfPZdNHKO/+TNuLAttUa7o
+o+CP9g488MAUqlIy5p9YJumLw/Zu7rj9lmyLwtCXt8MjM35lWYuO+AMe4SN5ZdmPDhqugy4SpiDGx1TGQwAHmG/WxgywgrSasZJn/ivrQkCMqEGRDs8Q89Vp
+wuW++nWImU1ado3BXY9u4XZslwJpScsmMeHJJdOui3BzuwhF3ev81rZ2WTXu4dChQ3PVs2iWtlzatkTklJNPjXVxB+REqxORaB7MZKCpHUhz2R9knLhAKIAT
+jz82cVLumG8D2XHjxmXol+A6EcmCUCs0hIMLN+pk6e3E5ZubZyGA2lhppZVSM2I2zDI7QUJsfZGPxf3lL64IF2aJONnptXh9ys5ZhxNhhXzVD8djx47NBbq0
+uPsYkCanjfWDK2pbuYW2xnKsJNfVIJ2CPOHEU4J5Xw7L3G7eU4YVU/8///lis3acXXf++efkeRebbrJx1tvZFzwgYobu5tQkzyX96KQj/it6zfxcXuV22+1z
+uTRI4Oeoo44NX9g59fM3Yx+6O15APSzm1pbKCCVlB0Z0N1/GIuET80qEwrpL7jQDYDsJfGyyyabxYvBbkx9sUgS7dXfOrnfEGFgpMXD6DQ/61Z1AuKGxuhSG
+rNJUhEMHCBKB859LB0gRCx3HLJhcWb8xkcZoD7/VVy4jhCgDcGXejmeeS7QiRrg0lqr8IAbD3sytXeMNzGfhKe2tnMTFsTkN4WlbDGveiIbhw0rgAAOmsuv1
+5JN+Eq+e/0wcy/WPFI6uXd9KmPWX5a269eOW0SPjGOF7czJVPWCXCDUfW7uECSEIlm+W5vkYS10+4vLc8yRczr0AqzGjMDR8wjfGpSm5iP7DC3q8X6o8NGIc
+ENBMfPqZmIxcI3FvYtsKB5OoYBNIMR4wZjPpS4i1R+kYjMMJnPpPoEwawxX3R1TLWMPA+onxTjcdGDC2tCxlW0y10867NSee/MvE9VE//EEyVzEbnHGFCbB1
+iIIxnelPf/pTMjfrgX86E9wSQgIIj1bsb7HF0AySjBp1c4Tbz4kXCeyQ7Sp3XyiX3T//ucyrP8ZJNWanFOwwEB43XsT78GHoYi2kNyEKlx94wFdyk+AWW2yd
+Lh1PyuSr8/n0F5+QAUIk2IZe3T3A0P5gWESCXJmNnTTCOnmGESSMJp/juFJig8EQAnJL6Ei+8qQdQ9IAGoZc7fmd9QRwk55+LOYzVsq6aW2CxC3TScxZE3qQ
+iAlE92oLOcbFCAbPtCrhdhEo4VsMCgaTkYcedkRo8HNjEecOaWmKCUrwaWlMRCD79l06tsVfHIsxR+QAthgf/JL+YhDMj8guTMhFdn+p0IomA+HJ5R5lYfVC
+waYeZWouD47knZ0gKScpK7351jvBAJvk71GhVA6O00vBgzHASVi1q2+UkGiVMDhGQldehqgWutekrgAEZQTfG21k5fbJyRMvv2y92Rvxf2owa7uO8l//olxf
+bz61w8ebU864OI///dGPjk6+QWd9x0/GJMYjlAoLgXcI1vhwJ8HL/RKmF8qHZxbeIlwTrCKOA2NKgZKyhk665NLLYg3kutkv/LbGmuvHa25GpQsuakeJGAvx
+UrjP+Ep/999//2ZMrGhngeFCWwRO/azVtttuk/UHJYKX2vPtwaovVvzMN6Vd/obXBeUEJ1KQEA5zQ3S5SCrn2vHh3cMECkLIWxGdo5nc99+FCST3aF2Mx2Uq
+BlQeINFUEhfDeDZ1amvi11ln7bwvigahEmRDPiT6jWmZY24UZlQfAqgbUkTICJF6EctAGZFWXXXV5usHHRKW5cFGYEEEitCDVT8hlxlnbdXp9zXX3RLW68jQ
+YDsnLDMzN4FhkSRl4IDmNPCtxI0aMeKqZoUVVwgNP6T5WGh5DCofhoUDuIJj9RUOq/zsvlmkjy63RvPaG2/HWX0bpiBcEqsLfhoKpRKBoQD1V9JfeDTrXys0
+3Le8hmXgaoIL/sAjcude9DLw2u79IRjcen2maH3Du3Hmrp/erPntDbfEoS3fbo4LYdI/iTVhDQQdKEj04wJ7zhVXJxeURYU3990j5NzhSvjg/PPPi9D7SXmY
+5jnn/LpZpu+SAccrAfOSYYV/G8I6Li2OSWZCjyaE0xhRG8Z/BNZmSm2wzISW0j3//PNTUI8/4aTmjJ+cG9M6/ZIvil/h791u7biccMElBdS9ZbzWzWoDBG8m
+YQGw0ELt6lhjh1dead0fHRL6XnTRBZOBlSFUNJTLb+Fy8w4LL+x42PbtBQDGjL7d8xuB+/fvp8pg9hg4huY0H4JBi+DWiEEEgAmI8jMzgfI0jzcrmL+pfNwT
+roGx1vjxTzWrrrx8amKTzISP4igF0qNHrKmKqJ1+v/zy5GazjdeNicf/STirPu2UQBHQCquDScIA5ookQnr55VfGLP9pTY84+PHtfzk7bUJaBAxr/Rni6BeX
+gxBL7n3Q5PX0Tzz+TDNkg5WDUZZPQTDAJpiFP+Ft7gqiV3u+qx35CAIPAKNVgheRLP0U+frGIYeGS3xCTBN8Mt0d5QmafsJj0fell16OMdMqzZVXj0w6/PD/
+hoflXCIFSX75yptgLassemB2V2fi2gsYsaSScY55Oe2ut966gbBJefY4RUEhNs0z4aHcFAGX/bJuXg+vhVtpPst/3okonkW81hdSzHixPBeBkU9uv11z2KHf
+blZdZblsF93Jinz6gCfgwH/fsfr79bQ0OkKyCIKlO2+84dXprbWgaWSGXN+uYkQdqP9cIo04QDBYLvMkFPEhj8Z7BNK5VKJhiy++WHPltXc0X95v/2Qm+15o
+LUleiem1grczeVYXOCT+NW1CSBFHovWZdQjo32+pZuxjjyeyMS94fOsTS4uR558/gh6x2/Ev997ZHH/c0dMZUp8kZUqQEAVRJfX4b6yGISVKgRB5WfBuO20Z
+d14Pgj2ZjFJLlqrednrhI1nug3wUDKYZFuq9cDNk3bWyX6Jt9Q6mqtt4idBKhVPlPa/LM67NY489Nj2Pe1xdsEnf+N+D4h23G8R4I9yZwFkbEnameht9pYDg
+Hk0FINZfb9VcsnPaaadnedvAW8vWrpgHiy3wdQ9M7hFsdRZdRQ15IZUoHYqVpRw4aGDexk9oh5d3/NQuzSmnnZkKkTurTyXsmJ71YfVYI5aQUqZsucnGVDwb
+7iQvZu+994jyTyRuKRt8QvjxSgkQ/mc1k0M0REp1gL/uoXERawM5OgdRvmk2zOf+/JGPufebtmGOPXe/hE4nJXlIvAGbt6v9M849u/Y3VzUnHHVwvBnv2Gwb
+I5a2AriOCefSqJ2pGMA3JpZoMm0IVlTSL+0jyiuhHHqFxppvvjgaOARNH8s0QyLYabTbbhsdk3fXhgZro2hVvzqLgf1WN/gkGg9DEGT1SmPG3BarDgblYfci
+f1yjsWMfy8Wr9vwQcAmDmWcZGP651NlG3nifD23FuoVmyaWWTCZkvWleST1OJoUTOH00GGpWddc99ON+oXHdI/C2X0sE7cenndjc/vtbmt6xkqV79/aQSrhE
+a/QiCCwSfFO+ywxYpRk46KNZ3pQA5pSUwbzczoK3aArf2i+8i6pxBbm+kmeWNI0PS7la1Pfzc84NT+SSqHO+fPUMfnz4oXtjfHRHWLGNA79/T7pqTx/ABk58
+rm00JKzm3QgYF7c2Mh500AHN2Efuj/LtYTLKgRP8+gsWcuE7XsbcTjzS2hJm1xihcs9vhZhE//3u1QtDztd4J+u777aWxpnfGkEIkiv5TYKVVU5+luiaa64I
+KV48GOjuCBwcFG22YypSzqpJQrDMucgRzVidyIcdHzrhGXj4uFYISOAeNmxYjEk2yHmim0ZeG+20Lwbg+hAil7KQgQAjLr843hh+XE5sqkPdMyd4kcw9OIVI
+IIR2MwiuPUoTJz7dnBCRwXXXGBzvK3o5iPZys96QdeNk10sSR5aoIKrgCOay5KhggcMPmmjh7be1ca9dNcAVU6dzDQQazNgbh9De++9/UGpRdVcf/K4+0qoU
+WS2ctRJAPRhMUobbfeJJJzfX/Pr2tDwEGX3LRfMN7xQSa/n0hIdijLFc0pTi0edKQus25KEbGhQc9bz+E3DjooLLc/yEZtKX9vlirs7o1WuBEKiL4/7rcUjk
+Js2PT/9p4LpbRPd2SpfVlApvgZvoG04EMQi48aCxtvk3sJjQdnjO6quvFng7IM7pezjga3FQMoC3XfDCQsVW8/YoLZoLgBiqtIEMMpNAHXPJx/0jGBBXwmbB
+qrySZxpgDSBK+JEr8GJoq6uvurU562dnR0TsvBiAr5/5lNOmLcTGNb4xu4gRl00qxOafmT6KMYxPRMpsnRDiFLpdffU1kpms3sak6nnxxfaNClxZ4z3If2Ts
+483nd9+7OeB/9s/aZ0VcD9ShPeMjbufwCNEbh5ivUkYaM2ZMM+GJOHAj5qHgixW0hu/CCy8LAp2d7pdBLxeFRqQw1DknQqQduF1xheUbRyzfEdrU6u2BYdlE
+tbi16BPVxkubv9fcdON1wYwPKzYdTr/hA9yEgiam7blE8CjIg4aeF473+eKwcBW5eG0ov+6rB/yUIXq/GpE8qe7Zyl3WWr3GsxSf9F60LXwOHTp0+hZ6giqs
+b4WChHe42Gf/7P81P/1/ZzY3j3IW4Xwh7Ffkhj+BBThm9QkyHjHm4+WwtqZn4EkAyPRKeT+ESj8I3cQJjyQe9NU9NAWbC37ITZdNNt1yKiZmRWhFiRaAEIkm
+8buEi+AAlAWKQ/Hyvufuq5glUQ/kyGeNkjTmlpsiYrZLM/z730nz6R4kQHQnIvm0td28kKUDnXmUrVRCqIO2iFuKxDpYIUyTMfGf2+PLzfrrrpbv8ZEfIriu
+yvTo4R2i3Ztbx9wUrtejoZkGJ1zV32pn5u+ZYUJgCseYYs+9vjgdZ3DimUSZ3DjyugjDfjvcib3CQrY7g4X+4Wpu0umn/yTmb86MMeASzVE//F66M+oB31/+
+cl8oo+/nZO1j4yY2hx/6P2EFDs5mZoa/2sZYXDy4xyCd+QrXf/7zvWFhdwiBGjI9QIOhJEqUAl44vJbRo2+MKN3RAcO3c+U5WgskCYFjbsIOP+7PKtUzPDUo
+xjbnnHNOhrTxpwhtpYLL/3vu+WNz+BHfifPQfxfzS1vnYZsTJz6V/bG1Y2gIJV4V8WRxnTXBxdNPgk2QjJH69l06ghlDwqvZt3nmWYfq9wjPol4y3XpAKUDB
+5wJrXT6+8dAUpLrJLBsv6URpEBKLGTAgADSK0SCPr0lwCBzGLOvlWffu3Zq/PTS2eWLcQ+G+/CKsyy7ZCeWlmYWjk2iev1c+z6RCICVw3HEnRlt/b755yMHN
+wNDK4Ln55tHNId/6YQjRygFbG7oHn3L64x2giwTBf//7Mam9TGIW8doW3v8TfAWzbwxhHsfWA0EGS0zAUYoK/vr0WTz8/VeaiZNeal587pFYrnJQTJS24f73
+b+3fn1a7F154UbP3sG81n91l83wh2Gd22i2UyIAYJI+LMeh1wfDrxUBaVLCdRL/qykvf01WuOqulWeGi8rCsImPbxpzc008/G3SNt6wHg8KtfsKFNYqjb/tL
+M+amyyICOCQtuLpZ4BK8qq/anPm7nhuHWqXO1eSloGM9U8YQI3g6eYoyu/jiS0N4vxKrM86OUPe+uTmRJbTaAU2Mmcw9SkcffUxY8yPyNyXSJWD31slzzzu/
+OfaEs5oN1l0xFF1rma1+dwwDmuovb0aUusu66204lcDoOK0oA4EgWC5zRiZWAQ2xJSzM4kvx/hshY4JUbiBB5LvzYVmhL/z3Ps23Djtk+kCzmD+hnsWHNgpB
+76WpFKt6vHn7sG8d3px7zq/iZJ2NmttuvcXTrHmJvis2Q9ZZJdd5ERzjP51nQf1fIiJAI0ff3Rx/9GGJdIWq7azgA35UGQz0jUMOa24YOTpcrkEBY3uwpuct
+g9kx7EyA3snMF//qlznhu+uuu8yRAAOrmPz6GBNuH1G5XXb9fMDergx5ZfJr0f5yqcgM+jEOl/3mUTfEMqXbcqxT5WfuovsSJTezonO/+ipCtt9Xvtb8/va7
+m0EDB6RHoywcsPYuzK5txxof8a3/DZfq41nnnyIoYm3gkd/+VrjI8DRjOKGNmVO12Xl/Vvc876zrzjvvinHQhtHW1eGubp+wd1p+q+VFBM1TWTp2cMw1Xjri
+5mbNNT7a3P+Xu5rBK6wZeByYARRyoW8srn4yLuSDQsDr3e0lctPs9FtvtZEIA0+ZNEpwjCUUnmGNnBH+z6ignQdQkbwQ171b7O+58754QfCTOfP8mU9/Khsr
+AgHo/dL7CU+VK2RxD7556BHNtdff0uyw4zbR0SmxfGSrUAbds9PBbhntoa3UC04CxDXwcuFnJz3fDNvzU+E3D8uq32VVqLU5TEVUs+JnnH5qs3UshDX2nDKl
+3bYP+RjKpCHEvxBvDKxWKio2h01Oz24xsEQLU2oLLBBvWwxllgPg6DM6UhwCRE2XpWJB6pgUpPfC83vdrwYJFwWhne8ceXizylpbx5sZe+f6Nq4PpVweDJ7g
+sSwWIfrddtu1qmi/u/SJcyTuCyt6QQ708cd7ta1Nz30Xrmcl5CrGX1WX9yeZEzo5zgE/59wLmg0/tl7shH0y+Xn3z382LaOAgwXGX/nqgc3fxz7Z/Ne2GwS+
+3shXfcKdiCu+phj0Cw+hJ+OBl/zmEXXZeJOh+aIxAOo4pNPcCrM4GMIzhMEELA5g+a0Y2mBdOZE3SPv9726JSNw3m/854GsZsdG56pjf85LA4YJwnT/gwG+E
+//ps06/vUrFDt13z1yPCoJhWh0UInQ2BuGCFCGFUUUBWk9t5803XxlxC+1rH2Qn57GA/5tgfhZY9Ig++ZO4hGLxwCUcSHCEQmLzMTNBBxGhOcVT5Hww/f8NN
+dmy23mJItBf7kATEw/poW57CGcJLzzz7fHPbmBtyoO3ZezFkZn6fj2r/yiuvanbd4+Bmh22HJANy555//oVkMnSCd20Yi8KDflNsr776SriEzzWvvzmlGTXy
+6lwiVgryfZqdo0ed9d0cgaFP5Hzk/M06667Z3Pvnu3Kz5rID+kaw5tZYJD0khMPb0dshQDu2bwUYvfSFbBAceKu6/YbbFCSCIyNhIWU6DlEWk9LcGFNBmpx5
+w5D+Cy9ikPnn9yqPsbEO6c3m7DNPyvAxpKlDXXNLrE6sAVhSl0Wk23/yc/EWuaVTWKwZA3/rx4YLGhqTUCzUa9r5edEPCazgtxXEoF8oVASnmCIzzeFHleXD
+D91yh3xvLmIUDgm0PPAKJ3A9efIrqRkH9O8Xe2iuDEvfO4kzJ3iqdp98ckKz6257xQsLvFql3cQGF4iPTpSfeTIKhJt+U+wf+u1vr49AwXbz1O8SQvg8+OBv
+NCOuur5ZafmP5pHUYNMuocIj+AcD4hk0oGSei52s9rXR+Pc+ML558C+jYxXBKvME06xIV3jyTCDlxxGcOeboH0aUdXvMFHOAT0U0b+kUIPRxoQO6+Ta/CHYW
+3zcPDX4LxxQWXstwCcsjo4KplYNpIYgQIQgmAJDKVQSJ7XhjgXj2dnPdtb+ODW+fbf58zy0R+fh0AlMdmBPmmBUi3CuiqYuPv96668b7d/rnRFlLpHZzFW3n
+Mg6CAALl1SPy6APGWjLc1htH/r457/xfpBCpG1LmNd16W2yZf/zBxJG+014ubfuuPmCmxRZbPP53i36s3bpc89C4ycLei8a6tHBnMS9GKKYFhz4/9VS7QNVY
+d/14Edd11/12miJsx75z0zxaqF9bhx76zehPWJ0QEnhn8XkuXMBSIJQ0/nGZu+QqcaFEx7bcbJ3m45t+srn/gQeSFur9sBLawr06RQytTLee8977H4nJ+2ci
+mvfRdLnxM0NB0PUNHsmCy9ymfqbARH2ekwu0lZKHIJ9LwOWQZPpXSFm5cCqRMTNHJRpSKW0zctQ9sYnv9Rhc39j8IF72ZLkFgCtvVjiPH+oDk0STDh26eb6m
+A3zOjdAZK3AriGDvCGadEorAPqJecTZzjokiP7/+iQkRBj78gGbPPXafR8hmuKxwdPHFl8WLpzdPzUbZgKu0G2KCAVzw3C3GkU89+VIQcbnE5bwAgqBLLLFY
+WBz7w15OwSE8FKF2CbF24dBYd7F4idrIm9pzMrSLVnOb1I8+DtC/4PyfRMj5hmCwduoDj8ABOLhJ+s8FAhshUpZQPf/8P9KlEhRaa82NYrX/Xz50YdL3gtXv
+bbfdpvnjXTeHVdo83Otr0mBw68gB2hF+sIKdFdNHF9gpLr8l3+IG3NaukKxjfSKCRSpJnfVwCkEEQmCI7nHffw3SJE5e+b/vHdhce82IZttttp4OKICL8eeW
+QFUOoOqThHlFXnbYcafUBjqIIQVGwIhJdNzbLiBigbBKtKBQPrjVxTdfJBTAwQd/PRnYvQ8DVis0TIraXAYWdbKMmAlBKCt4Nsnsvjk4pxJhQGlu4Ci4Kbb+
+MdHYtYsNk+3GO7gIEBIOCgYOwZLaNsZQr7zqrX33Ztvz+lFw4IHvfOd7zR133R/80s5LWnNJ0N8JV45rKTgFB/gMTricGNM0QYh0M2T9tWNX7lZ5mGUx/rzC
+11lenRQHfJsMtwjXGO/WW0eFNfx78MaMI7fk1bcWd22ovf0d+O7fP3EJn5QoVz6PLCY8GkAATGfXKN9PQYTABD0DITT6yBuuS0AsJ/nud4+MSbW++f/DtEI6
+zy3Qvk6f8ZOfxgLCvTIaBukCIJZ1eEaDgA9smLT3ou3B6Z7xxXWaIqApbxz5x+boo74fJn7JLKP+uU3VX8xx1dXXNKutvl4qIQwCDrgEW63ho31pZficEvDG
++To5ITm37Vc5tMOQk56Pw2dikxn6ga1nz9gwGYoFHfUfXNo2R7L0kmGVYoU0mOBA/rlNmA2uSe6BsVp++UHLpKJVrwl7CtncC4UiVIn5WCj3wVdKDiwmPTfe
+ZK3ctXpnrEQo+s8LfDP3q4QDzHC3006fyYn4Hbb/RKyAGREwcO1mWBt5uMQ8IHgEE0UNr2BGY7jNt1HokIfmGiRbdYtRPcOEQsuE6Jhjjo2JvitzCY68AFJ5
+aSb35jURDNaGdv/hD49uvnnECXlqDUroDCIUExjbgXXJJZdKYpllrrV570Y94Lci2ZKRyy47PXZnrpeMA0Hzkoq4Qt6XXnFT7JsanAKuTiFXcMqjD8lU8f8f
+cZ+VhMvV1mg3As4tDPBdMPTps3iua0NQwqr/2hVUca/wZdkOS77Qgj2bc35+Vq6M1n7VM7ew6B/FR6BPOeW43FUsigvH2gZLegmhjDGe3QXaLL7zDM3hxY6D
+DTfaLPZX7RyLSO9M3Mk7rzDO3DcwqxP/Ws1y0knH5yTuqJv+EIbE8qdeCXO5eDXE4ZoLLFEAaOA5xdm1VywudIN/OGXKu0mIBeZnhdq5AhXeMvqm1Ph2HB5x
+xOHpTgEAIAD6MFMrRG14/RvfOLTxot2ddtg0tYI2Id38EYZkbbil7tEQyaQR/Hj11dgO0j1WhgfzckUfeujR5vvD/y8OxGi3aHwY8Fa/r732uublF8blspra
+ts99of1pX8yCebg4NDTEK7vuOmtktG5eYCnm4qdXwoyIjYFpUrQFh3a9nduqbcIl3XHHnfktz7wmig88G8dK8x//+IxYmnN9MmMxXAmUaZPaaeAe/BB+gQDf
+Xbt2j+9Fmp123jRXrY+JHdOF6+rvvMJa5fVb3XgOjj7/+d1iPeKdzYrLL9dcecWl8czzNnKHpvqC15zlAZ+UpXLo2yVewjSVdqc1uCEemlhbMFylKcG4t8Ua
+tFNOPa3Ze689p2t6HQKEK37Gd2k1BOEmeNZ5r83jmfydqc3X5icotJiVuQcf/M2Y2L232WLzDXMgTUick7d4jOW0zxLJiyloC1qhiOK3unRw8mQv2F0gXwBt
+HAgRylUfCn4w/fvv6CNfRJektlv5E+LhyVyWNWmbbLrFNG06NQWmcMMizD//AsnYxijaxHCTnvtHjCu3aGx6AwtYlZnTVPgSxrfyfKOPbxp4aN9ACD51wwna
+StwTzIDO3N5u3bo0F114fiqjuYWhE+aCx1aKPff67zzhSESRkoYzys74qPBDCYITTBLayAdPGFh9t8R6vdGjb8lzGtyXWr7rZKSWf/LhLD5m0HpGednqvt/a
+kggWeH/1q4tj+/2Xmr79BsdGxdWThmCrPhYsBAqeu7oBscV8KkPTyWF+lx2wTKxsfjCWTnw9hagayoLRoA7pcPsttt5K+H/ea/PQkADtvNp77XP1WgP1+d2/
+0Fx7w63NumuvEh1oTzei4ftEYAFj0Ljgtr4L8sGOUQ1cPUcw990D28/OOiODKeBHuIK3E/7//B39m9Y3zzp/q0O68aZR+e2/tlxSuTLojkn0iyYm2G9FpPHP
+f7y9WWnFFfK+/PCRbWhnDi71SrR59Cz61vYXrrQLDxQOXGBgRKdB4a9Hj24xh3Xb9BXhcwtDJ7wFD4ty/HHHhFVsdxSAB4+ZLwODyz1MCSfgM7dGIVKMYGRZ
+F4qI63b/Fa+F2XrvXDdZbelz/W6/3XnvJI9UZSpn3fe/eBJMxtv77rtPLvrdeKMhsWnz0YCnXUsqH34DvyGPvhibdvdAYZXqkP/CeQ89Mi5CxLvlDDifEONC
+FOZopXHWs+KeFYDytr/zR5bL59M65TlV7x7kcdm+8tWDkmm329phiSyPhZ/tsnXtcweKCEK+mBiRlGdZaXzLbjDR3x8eGxZt4+xsbfjTpvZaYW/7rs8JSfjn
+AfF0mFrYMfe0PscPCxa15cTVK664OqzRVlmGELVIjfBuaH4BBeUJELyKLib+ovyQDTaLflh9YTFwe0Z2iwtQwFl+TmvX7xan6AQ+9RpL+M9ltCN10T7LJh5M
+cOofWAgTWPUPntponlUGcfZgBENWXX1gbstedbVVs19gqLa12rbLZWu1dd5JmlU+d1omLRhb3KLBUqEIV8+JZwN4cLb8046N0IqFBAd4LOvq3bt9GwqYaX8J
+nbbfbr04FWmrnGYRHdTGv6eWZokXD6Y9b/E0c95/Lznzv5IHbay11pox5jux2fHTuwYePxIKa+kcPqgXH4Ibn+HTLltsue1UpniRMMFvB8NKiOCaMOGp5v77
+7ok7sft1sV4R7VksGeCJcY83q8e+FZL5eCzPb6NQsRnvhfFNjwWWad6O17lEvJytjrLqDDesp8M+woS++XT8j9Q9dl5OsYkvTh9aeNlmicVjLdNbsbdkVYst
+vfjYXEzspwkadg/BxjRgorGKEUrbG1zrOE0rj7wIZOKvR4+PNPf88dFmkd4R1Zvf8qEp4bb2DKKFGxuC+chjE4LxLT7s0Tz3QpyYFANx48OXXmnD2Iv3XjCW
+Oi0b7uWLKRCYggA8Hau3N1p/1YQFrBhF2yyitsEE4c4DAG9p2lIEk2Kd36BBy+Z5A8o47ll+jGkuTBluISFk4VoF12OasmhdNsufMN3Eic9E/taVgwd4UV5b
+ymFK4w/Mqy1wglcihP37LRPPrMYIJRr1AYPrRzgwkHWYxlfqRBv01i4tTSHQzKzhfPFcYXhXL2/CGFUf4I1ChCsury0kYLV6GnwisVIxst/6ASfgEYX860OP
+Nbvtsn26osSDYJovNPa0RjJUcsKpDnW/i3lkjPIm58FeSVlrK7vFygvKDM+4VwLsW3557rjjrqQfhV2H7nguP7pqq8tmm38ix0i0PARLKtUJYcrS/pBJcN6I
+OSTIRjgMh6ns4e8VQQllxNRLgyEa4YR0nVww5jlyYWhoGR1wVhizCEBEhmiIeSMYAAzFPJ5zUcxF2JylEwl8IE8bCCy/+2AwntJJ7kx1mNtXE7iiePUbIcs1
+ND+mzxConP9LhxZSD+FQt2SlsN2+8wVzSODELBgXDsGpDv12X10sEg0sakYwwAx/AiLmW+RVBr5fiPLu8dXnC0UADu4GuOEJPNroGQy8aEywmpfStqQe8MAL
+HMGFOhHc/RKgsvT+q/+pEMYll1g8GVobyYGBX4fGcPHVJ4w9YEC/ZFZCjLaUl76ZMnkppyZiB3XwgrMv4MJz/ReIIczGRaUY3CcotDp8gA8tuHhwpS/oLp+6
+4Od3f3iwmfrW681Hwu1781V9fl63Q9EPjIUEMfn7xluxanuZ5rFxk0IbPBtP4tU0vfs0PQOe+YJn3wjBfzXf2/VOGIaFQ4H3bv4V9Hg7FOyT4/+adfUbsHKu
+FiHB+GqZvkskLuHx+efb9uADfA6geXXyq02X2Jab2yhkSimOgjpKGPzXCd8m2WgpndFpCLCOqhgew7z2Wrt7kG/baqH2pEoMKB9iAgDCWBuqwn3I044ykjbk
+ARMN4H+VlVd9rImDzFtGaDU25HOlfFd9OgtWdYNbX8AguY9h9IMLoUxpRvnd/0gwsrPMtFN1goFgUQJvhzYr+NSNCcEttRquPb/Ab33CNGA3YUw4qm3PWzwb
+fLcnvvo/JbZdcFdbwTMvNiDD6zT7m3G+OqvgSF0wgFnfs1z8xrQErpjUfc8xN8HTpv8JU9BXBFef6pk+sFJwBGdWkNRUCNzBhzoIq/us1vzRJ/BTtMY5eMlz
+OGOlKSqCCy7/JXCpp/Bb7buP7nhP2631ace44MFvEqXcNcqjtfxoUHymrp5xD77hATzacb+inS2/B49HHTMsXSwtC9yCCw7wVeEP/2nDf7SG926rrb7mcDc0
+4iFgPERgGTA6pIl4FQC+VQZByvrvG4IQr5OYkMCHVFfVj+i0rTpoV/Vr338XhtNuLUb1G4Ehk2aCBB1Wj7Y9R0B5wOG+OTH9UC94qk7PJcykrRI0z13ul9DO
+F+4G66MOhIFURGExivAVefJcfe5jTHWBwz3PME072dclrIjDQ1r3FTwUlIuVgEMwqadHuEsiqPJ+JFwsFvwfMQaCF220Fs0BMK31Y7nhB559gwHsLT3bvoEF
+zjz3rC448Uy92tamRcuY1LyPe5Ln6IWe6nG/mHORsDivB13V88wz7Qm7FBTlpD7aHEP6TTirHThSl377DacEj6Jp2+2S9+XBN7ajwEsKV+CNd4EnDFHkoaBf
+CqsGl6mUpj3Dr+AmGHDnm8UDr9+sqnqmxDOKcnJYGuxSSkM+sINRPWBjcCigbssNXmE4ZHoIuZLKJAhEBKb7zXDhuG18ZkuIIBTxdVo5k59WP2gIAUFQHcfo
+6vdt8hRAGAjDuiefqBjg6h6YdEA+SPWMVdCuMmDEXHWaTb65LRZOQgjNGD8Crna+RNvylyBrH4zq0T/t05SW2NBS+qM9bdNKtL4VASwDl0Zd704N5WHcEC6D
+PrsH3kJ6Ma9+qM99DKdNdeuHvrpX/cQY8AlOV/U13+QRFhwsU95pXwz8kZ7mhtr+6Y8+lNXEkOBx3zfrVX2XR9/NixBu6/MIIhj11wUeZSkSSd2EtPCGgQpm
+bVGGnvEy4IK7rG/6AvfabsdGcdJPKDX9t1IdHOpmaco6KO+CF3BpiwDAo9/cWZFPv7WtbrB7rm7tqNOYkxDpp2dgkR/c6tdP+cDggmvf8IXO2oYrl7bafrWv
+voQTefWxZ9CMIP9/0Wj1ClQSO/4AAAAASUVORK5CYII=`
diff --git a/ollama/integration/llm_test.go b/ollama/integration/llm_test.go
new file mode 100755
index 0000000000000000000000000000000000000000..398e0a03a9bbf0f668273a77774438ef94f294d1
--- /dev/null
+++ b/ollama/integration/llm_test.go
@@ -0,0 +1,47 @@
+//go:build integration
+
+package integration
+
+import (
+ "context"
+ "testing"
+ "time"
+
+ "github.com/ollama/ollama/api"
+)
+
+// TODO - this would ideally be in the llm package, but that would require some refactoring of interfaces in the server
+// package to avoid circular dependencies
+
+var (
+ stream = false
+ req = [2]api.GenerateRequest{
+ {
+ Model: "orca-mini",
+ Prompt: "why is the ocean blue?",
+ Stream: &stream,
+ Options: map[string]interface{}{
+ "seed": 42,
+ "temperature": 0.0,
+ },
+ }, {
+ Model: "orca-mini",
+ Prompt: "what is the origin of the us thanksgiving holiday?",
+ Stream: &stream,
+ Options: map[string]interface{}{
+ "seed": 42,
+ "temperature": 0.0,
+ },
+ },
+ }
+ resp = [2][]string{
+ {"sunlight"},
+ {"england", "english", "massachusetts", "pilgrims"},
+ }
+)
+
+func TestIntegrationSimpleOrcaMini(t *testing.T) {
+ ctx, cancel := context.WithTimeout(context.Background(), time.Second*120)
+ defer cancel()
+ GenerateTestHelper(ctx, t, req[0], resp[0])
+}
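Note: the `//go:build integration` tag above means these tests only run when that tag is supplied (e.g. `go test -tags integration ./integration/...`). As a minimal sketch of how the same helper could be reused, the test below is illustrative only; the prompt and expected substrings are assumptions, not part of the patch, and it relies on GenerateTestHelper defined in utils_test.go further down.

//go:build integration

package integration

import (
	"context"
	"testing"
	"time"

	"github.com/ollama/ollama/api"
)

func TestIntegrationBlueSky(t *testing.T) {
	// Same overall budget as TestIntegrationSimpleOrcaMini above.
	ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
	defer cancel()

	noStream := false
	req := api.GenerateRequest{
		Model:  "orca-mini",            // assumed: the small model already used by this suite
		Prompt: "why is the sky blue?", // illustrative prompt, not from the original diff
		Stream: &noStream,
		Options: map[string]interface{}{
			"seed":        42,
			"temperature": 0.0,
		},
	}
	// Any of these lower-cased substrings appearing in the response counts as a pass.
	GenerateTestHelper(ctx, t, req, []string{"scatter", "rayleigh", "sunlight"})
}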
diff --git a/ollama/integration/max_queue_test.go b/ollama/integration/max_queue_test.go
new file mode 100755
index 0000000000000000000000000000000000000000..ec9e085a5ad851796dd3975475b04b2d93311b34
--- /dev/null
+++ b/ollama/integration/max_queue_test.go
@@ -0,0 +1,120 @@
+//go:build integration
+
+package integration
+
+import (
+ "context"
+ "errors"
+ "log/slog"
+ "os"
+ "strconv"
+ "strings"
+ "sync"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/require"
+
+ "github.com/ollama/ollama/api"
+ "github.com/ollama/ollama/envconfig"
+)
+
+func TestMaxQueue(t *testing.T) {
+ if os.Getenv("OLLAMA_TEST_EXISTING") != "" {
+ t.Skip("Max Queue test requires spawing a local server so we can adjust the queue size")
+ return
+ }
+
+ // Note: This test can be quite slow when running in CPU mode, so keep the threadCount low unless you're on a GPU
+ // Also note that by default Darwin can't sustain > ~128 connections without adjusting limits
+ threadCount := 32
+ if maxQueue := envconfig.MaxQueue(); maxQueue != 0 {
+ threadCount = int(maxQueue)
+ } else {
+ t.Setenv("OLLAMA_MAX_QUEUE", strconv.Itoa(threadCount))
+ }
+
+ req := api.GenerateRequest{
+ Model: "orca-mini",
+ Prompt: "write a long historical fiction story about christopher columbus. use at least 10 facts from his actual journey",
+ Options: map[string]interface{}{
+ "seed": 42,
+ "temperature": 0.0,
+ },
+ }
+ resp := []string{"explore", "discover", "ocean"}
+
+ // CPU mode takes much longer at the limit with a large queue setting
+ ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
+ defer cancel()
+ client, _, cleanup := InitServerConnection(ctx, t)
+ defer cleanup()
+
+ require.NoError(t, PullIfMissing(ctx, client, req.Model))
+
+ // Context for the worker threads so we can shut them down
+ // embedCtx, embedCancel := context.WithCancel(ctx)
+ embedCtx := ctx
+
+ var genwg sync.WaitGroup
+ genwg.Add(1) // Add before starting the goroutine so Wait can't race ahead of it
+ go func() {
+ defer genwg.Done()
+ slog.Info("Starting generate request")
+ DoGenerate(ctx, t, client, req, resp, 45*time.Second, 5*time.Second)
+ slog.Info("generate completed")
+ }()
+
+ // Give the generate a chance to get started before we start hammering on embed requests
+ time.Sleep(5 * time.Millisecond)
+
+ threadCount += 10 // Add a few extra to ensure we push the queue past its limit
+ busyCount := 0
+ resetByPeerCount := 0
+ canceledCount := 0
+ successCount := 0
+ counterMu := sync.Mutex{}
+ var embedwg sync.WaitGroup
+ for i := 0; i < threadCount; i++ {
+ embedwg.Add(1) // Add before spawning so the later Wait observes every worker
+ go func(i int) {
+ defer embedwg.Done()
+ slog.Info("embed started", "id", i)
+ embedReq := api.EmbeddingRequest{
+ Model: req.Model,
+ Prompt: req.Prompt,
+ Options: req.Options,
+ }
+ // Fresh client for every request
+ client, _ := GetTestEndpoint()
+
+ resp, genErr := client.Embeddings(embedCtx, &embedReq)
+ counterMu.Lock()
+ defer counterMu.Unlock()
+ switch {
+ case genErr == nil:
+ successCount++
+ require.Greater(t, len(resp.Embedding), 5) // somewhat arbitrary, but sufficient to be reasonable
+ case errors.Is(genErr, context.Canceled):
+ canceledCount++
+ case strings.Contains(genErr.Error(), "busy"):
+ busyCount++
+ case strings.Contains(genErr.Error(), "connection reset by peer"):
+ resetByPeerCount++
+ default:
+ require.NoError(t, genErr, "%d request failed", i)
+ }
+
+ slog.Info("embed finished", "id", i)
+ }(i)
+ }
+ genwg.Wait()
+ slog.Info("generate done, waiting for embeds")
+ embedwg.Wait()
+
+ slog.Info("embeds completed", "success", succesCount, "busy", busyCount, "reset", resetByPeerCount, "canceled", canceledCount)
+ require.Equal(t, 0, resetByPeerCount, "Connections reset by peer, have you updated your fd and socket limits?")
+ require.True(t, busyCount > 0, "no requests hit busy error but some should have")
+ require.True(t, canceledCount == 0, "no requests should have been canceled due to timeout")
+
+}
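The test above saturates the scheduler queue and classifies each embedding failure as busy, reset-by-peer, or canceled. As a rough sketch of the client-side pattern it exercises, the snippet below (an assumption, not part of the patch) retries an embedding request when the server reports it is busy; the retry count, the backoff, and the substring check on the error message mirror the test and are not a documented API contract.

//go:build integration

package integration

import (
	"context"
	"strings"
	"time"

	"github.com/ollama/ollama/api"
)

// embedWithRetry is a hypothetical helper: it retries an embedding call a few
// times when the server's request queue is full, backing off between attempts.
func embedWithRetry(ctx context.Context, client *api.Client, req *api.EmbeddingRequest) (*api.EmbeddingResponse, error) {
	var lastErr error
	for attempt := 1; attempt <= 5; attempt++ {
		resp, err := client.Embeddings(ctx, req)
		if err == nil {
			return resp, nil
		}
		lastErr = err
		// A full queue surfaces as a "busy" error in the test above; treat anything else as fatal.
		if !strings.Contains(err.Error(), "busy") {
			return nil, err
		}
		select {
		case <-time.After(time.Duration(attempt) * 500 * time.Millisecond):
		case <-ctx.Done():
			return nil, ctx.Err()
		}
	}
	return nil, lastErr
}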
diff --git a/ollama/integration/utils_test.go b/ollama/integration/utils_test.go
new file mode 100755
index 0000000000000000000000000000000000000000..a60109958bbb7dea3504c281bcd5bfdf8b759173
--- /dev/null
+++ b/ollama/integration/utils_test.go
@@ -0,0 +1,343 @@
+//go:build integration
+
+package integration
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "log/slog"
+ "math/rand"
+ "net"
+ "net/http"
+ "net/url"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strconv"
+ "strings"
+ "sync"
+ "testing"
+ "time"
+
+ "github.com/ollama/ollama/api"
+ "github.com/ollama/ollama/app/lifecycle"
+ "github.com/stretchr/testify/require"
+)
+
+func Init() {
+ lifecycle.InitLogging()
+}
+
+func FindPort() string {
+ port := 0
+ if a, err := net.ResolveTCPAddr("tcp", "localhost:0"); err == nil {
+ var l *net.TCPListener
+ if l, err = net.ListenTCP("tcp", a); err == nil {
+ port = l.Addr().(*net.TCPAddr).Port
+ l.Close()
+ }
+ }
+ if port == 0 {
+ port = rand.Intn(65535-49152) + 49152 // get a random port in the ephemeral range
+ }
+ return strconv.Itoa(port)
+}
+
+func GetTestEndpoint() (*api.Client, string) {
+ defaultPort := "11434"
+ ollamaHost := os.Getenv("OLLAMA_HOST")
+
+ scheme, hostport, ok := strings.Cut(ollamaHost, "://")
+ if !ok {
+ scheme, hostport = "http", ollamaHost
+ }
+
+ // trim trailing slashes
+ hostport = strings.TrimRight(hostport, "/")
+
+ host, port, err := net.SplitHostPort(hostport)
+ if err != nil {
+ host, port = "127.0.0.1", defaultPort
+ if ip := net.ParseIP(strings.Trim(hostport, "[]")); ip != nil {
+ host = ip.String()
+ } else if hostport != "" {
+ host = hostport
+ }
+ }
+
+ if os.Getenv("OLLAMA_TEST_EXISTING") == "" && port == defaultPort {
+ port = FindPort()
+ }
+
+ slog.Info("server connection", "host", host, "port", port)
+
+ return api.NewClient(
+ &url.URL{
+ Scheme: scheme,
+ Host: net.JoinHostPort(host, port),
+ },
+ http.DefaultClient), fmt.Sprintf("%s:%s", host, port)
+}
+
+var serverMutex sync.Mutex
+var serverReady bool
+
+func startServer(t *testing.T, ctx context.Context, ollamaHost string) error {
+ // Make sure the server has been built
+ CLIName, err := filepath.Abs("../ollama")
+ if err != nil {
+ return err
+ }
+
+ if runtime.GOOS == "windows" {
+ CLIName += ".exe"
+ }
+ _, err = os.Stat(CLIName)
+ if err != nil {
+ return fmt.Errorf("CLI missing, did you forget to build first? %w", err)
+ }
+ serverMutex.Lock()
+ defer serverMutex.Unlock()
+ if serverReady {
+ return nil
+ }
+
+ if tmp := os.Getenv("OLLAMA_HOST"); tmp != ollamaHost {
+ slog.Info("setting env", "OLLAMA_HOST", ollamaHost)
+ t.Setenv("OLLAMA_HOST", ollamaHost)
+ }
+
+ slog.Info("starting server", "url", ollamaHost)
+ done, err := lifecycle.SpawnServer(ctx, "../ollama")
+ if err != nil {
+ return fmt.Errorf("failed to start server: %w", err)
+ }
+
+ go func() {
+ <-ctx.Done()
+ serverMutex.Lock()
+ defer serverMutex.Unlock()
+ exitCode := <-done
+ if exitCode > 0 {
+ slog.Warn("server failure", "exit", exitCode)
+ }
+ serverReady = false
+ }()
+
+ // TODO wait only long enough for the server to be responsive...
+ time.Sleep(500 * time.Millisecond)
+
+ serverReady = true
+ return nil
+}
+
+func PullIfMissing(ctx context.Context, client *api.Client, modelName string) error {
+ slog.Info("checking status of model", "model", modelName)
+ showReq := &api.ShowRequest{Name: modelName}
+
+ showCtx, cancel := context.WithDeadlineCause(
+ ctx,
+ time.Now().Add(10*time.Second),
+ fmt.Errorf("show for existing model %s took too long", modelName),
+ )
+ defer cancel()
+ _, err := client.Show(showCtx, showReq)
+ var statusError api.StatusError
+ switch {
+ case errors.As(err, &statusError) && statusError.StatusCode == http.StatusNotFound:
+ break
+ case err != nil:
+ return err
+ default:
+ slog.Info("model already present", "model", modelName)
+ return nil
+ }
+ slog.Info("model missing", "model", modelName)
+
+ stallDuration := 30 * time.Second // This includes checksum verification, which can take a while on larger models
+ stallTimer := time.NewTimer(stallDuration)
+ fn := func(resp api.ProgressResponse) error {
+ // fmt.Print(".")
+ if !stallTimer.Reset(stallDuration) {
+ return errors.New("stall was detected, aborting status reporting")
+ }
+ return nil
+ }
+
+ stream := true
+ pullReq := &api.PullRequest{Name: modelName, Stream: &stream}
+
+ var pullError error
+
+ done := make(chan int)
+ go func() {
+ pullError = client.Pull(ctx, pullReq, fn)
+ done <- 0
+ }()
+
+ select {
+ case <-stallTimer.C:
+ return errors.New("download stalled")
+ case <-done:
+ return pullError
+ }
+}
+
+var serverProcMutex sync.Mutex
+
+// Returns a Client, the test endpoint, and a cleanup function; fails the test on errors.
+// Starts the server if needed
+func InitServerConnection(ctx context.Context, t *testing.T) (*api.Client, string, func()) {
+ client, testEndpoint := GetTestEndpoint()
+ if os.Getenv("OLLAMA_TEST_EXISTING") == "" {
+ serverProcMutex.Lock()
+ fp, err := os.CreateTemp("", "ollama-server-*.log")
+ if err != nil {
+ t.Fatalf("failed to generate log file: %s", err)
+ }
+ lifecycle.ServerLogFile = fp.Name()
+ fp.Close()
+ require.NoError(t, startServer(t, ctx, testEndpoint))
+ }
+
+ return client, testEndpoint, func() {
+ if os.Getenv("OLLAMA_TEST_EXISTING") == "" {
+ defer serverProcMutex.Unlock()
+ if t.Failed() {
+ fp, err := os.Open(lifecycle.ServerLogFile)
+ if err != nil {
+ slog.Error("failed to open server log", "logfile", lifecycle.ServerLogFile, "error", err)
+ return
+ }
+ data, err := io.ReadAll(fp)
+ if err != nil {
+ slog.Error("failed to read server log", "logfile", lifecycle.ServerLogFile, "error", err)
+ return
+ }
+ slog.Warn("SERVER LOG FOLLOWS")
+ os.Stderr.Write(data)
+ slog.Warn("END OF SERVER")
+ }
+ err := os.Remove(lifecycle.ServerLogFile)
+ if err != nil && !os.IsNotExist(err) {
+ slog.Warn("failed to cleanup", "logfile", lifecycle.ServerLogFile, "error", err)
+ }
+ }
+ }
+}
+
+func GenerateTestHelper(ctx context.Context, t *testing.T, genReq api.GenerateRequest, anyResp []string) {
+ client, _, cleanup := InitServerConnection(ctx, t)
+ defer cleanup()
+ require.NoError(t, PullIfMissing(ctx, client, genReq.Model))
+ DoGenerate(ctx, t, client, genReq, anyResp, 30*time.Second, 10*time.Second)
+}
+
+func DoGenerate(ctx context.Context, t *testing.T, client *api.Client, genReq api.GenerateRequest, anyResp []string, initialTimeout, streamTimeout time.Duration) {
+ stallTimer := time.NewTimer(initialTimeout)
+ var buf bytes.Buffer
+ fn := func(response api.GenerateResponse) error {
+ // fmt.Print(".")
+ buf.Write([]byte(response.Response))
+ if !stallTimer.Reset(streamTimeout) {
+ return errors.New("stall was detected while streaming response, aborting")
+ }
+ return nil
+ }
+
+ stream := true
+ genReq.Stream = &stream
+ done := make(chan int)
+ var genErr error
+ go func() {
+ genErr = client.Generate(ctx, &genReq, fn)
+ done <- 0
+ }()
+
+ select {
+ case <-stallTimer.C:
+ if buf.Len() == 0 {
+ t.Errorf("generate never started. Timed out after :%s", initialTimeout.String())
+ } else {
+ t.Errorf("generate stalled. Response so far:%s", buf.String())
+ }
+ case <-done:
+ require.NoError(t, genErr, "failed with %s request prompt %s ", genReq.Model, genReq.Prompt)
+ // Verify the response contains the expected data
+ response := buf.String()
+ atLeastOne := false
+ for _, resp := range anyResp {
+ if strings.Contains(strings.ToLower(response), resp) {
+ atLeastOne = true
+ break
+ }
+ }
+ require.True(t, atLeastOne, "none of %v found in %s", anyResp, response)
+ slog.Info("test pass", "model", genReq.Model, "prompt", genReq.Prompt, "contains", anyResp, "response", response)
+ case <-ctx.Done():
+ t.Error("outer test context done while waiting for generate")
+ }
+}
+
+// Generate a set of requests
+// By default each request uses orca-mini as the model
+func GenerateRequests() ([]api.GenerateRequest, [][]string) {
+ return []api.GenerateRequest{
+ {
+ Model: "orca-mini",
+ Prompt: "why is the ocean blue?",
+ Stream: &stream,
+ KeepAlive: &api.Duration{Duration: 10 * time.Second},
+ Options: map[string]interface{}{
+ "seed": 42,
+ "temperature": 0.0,
+ },
+ }, {
+ Model: "orca-mini",
+ Prompt: "why is the color of dirt brown?",
+ Stream: &stream,
+ KeepAlive: &api.Duration{Duration: 10 * time.Second},
+ Options: map[string]interface{}{
+ "seed": 42,
+ "temperature": 0.0,
+ },
+ }, {
+ Model: "orca-mini",
+ Prompt: "what is the origin of the us thanksgiving holiday?",
+ Stream: &stream,
+ KeepAlive: &api.Duration{Duration: 10 * time.Second},
+ Options: map[string]interface{}{
+ "seed": 42,
+ "temperature": 0.0,
+ },
+ }, {
+ Model: "orca-mini",
+ Prompt: "what is the origin of independence day?",
+ Stream: &stream,
+ KeepAlive: &api.Duration{Duration: 10 * time.Second},
+ Options: map[string]interface{}{
+ "seed": 42,
+ "temperature": 0.0,
+ },
+ }, {
+ Model: "orca-mini",
+ Prompt: "what is the composition of air?",
+ Stream: &stream,
+ KeepAlive: &api.Duration{Duration: 10 * time.Second},
+ Options: map[string]interface{}{
+ "seed": 42,
+ "temperature": 0.0,
+ },
+ },
+ },
+ [][]string{
+ {"sunlight"},
+ {"soil", "organic", "earth", "black", "tan"},
+ {"england", "english", "massachusetts", "pilgrims", "british"},
+ {"fourth", "july", "declaration", "independence"},
+ {"nitrogen", "oxygen", "carbon", "dioxide"},
+ }
+}
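GenerateRequests above pairs each canned prompt with the substrings an acceptable answer should contain. Below is a minimal sketch (an assumption, not part of the patch) of how those pairs could drive DoGenerate sequentially; the timeout budgets are illustrative.

//go:build integration

package integration

import (
	"context"
	"testing"
	"time"

	"github.com/stretchr/testify/require"
)

func TestIntegrationAllPrompts(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute)
	defer cancel()

	// Spins up a local server unless OLLAMA_TEST_EXISTING points at a running one.
	client, _, cleanup := InitServerConnection(ctx, t)
	defer cleanup()

	reqs, expected := GenerateRequests()
	require.NoError(t, PullIfMissing(ctx, client, reqs[0].Model))

	for i := range reqs {
		// 90s to produce the first token, 30s maximum stall while streaming (assumed budgets).
		DoGenerate(ctx, t, client, reqs[i], expected[i], 90*time.Second, 30*time.Second)
	}
}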
diff --git a/ollama/llm/ext_server/CMakeLists.txt b/ollama/llm/ext_server/CMakeLists.txt
new file mode 100755
index 0000000000000000000000000000000000000000..bfc97c63d99f2561ddd25908d8ce0f2e33a11618
--- /dev/null
+++ b/ollama/llm/ext_server/CMakeLists.txt
@@ -0,0 +1,13 @@
+set(TARGET ollama_llama_server)
+option(LLAMA_SERVER_VERBOSE "Build verbose logging option for Server" ON)
+include_directories(${CMAKE_CURRENT_SOURCE_DIR})
+add_executable(${TARGET} server.cpp utils.hpp json.hpp httplib.h)
+install(TARGETS ${TARGET} RUNTIME)
+target_compile_definitions(${TARGET} PRIVATE
+ SERVER_VERBOSE=$<BOOL:${LLAMA_SERVER_VERBOSE}>
+)
+target_link_libraries(${TARGET} PRIVATE ggml llama common llava ${CMAKE_THREAD_LIBS_INIT})
+if (WIN32)
+ TARGET_LINK_LIBRARIES(${TARGET} PRIVATE ws2_32)
+endif()
+target_compile_features(${TARGET} PRIVATE cxx_std_11)
\ No newline at end of file
diff --git a/ollama/llm/ext_server/httplib.h b/ollama/llm/ext_server/httplib.h
new file mode 100755
index 0000000000000000000000000000000000000000..28746000cddceb5ecb8d8b56e7a25ab08ea7ee4f
--- /dev/null
+++ b/ollama/llm/ext_server/httplib.h
@@ -0,0 +1,8794 @@
+//
+// httplib.h
+//
+// Copyright (c) 2023 Yuji Hirose. All rights reserved.
+// MIT License
+//
+
+#ifndef CPPHTTPLIB_HTTPLIB_H
+#define CPPHTTPLIB_HTTPLIB_H
+
+#define CPPHTTPLIB_VERSION "0.12.2"
+
+/*
+ * Configuration
+ */
+
+#ifndef CPPHTTPLIB_KEEPALIVE_TIMEOUT_SECOND
+#define CPPHTTPLIB_KEEPALIVE_TIMEOUT_SECOND 5
+#endif
+
+#ifndef CPPHTTPLIB_KEEPALIVE_MAX_COUNT
+#define CPPHTTPLIB_KEEPALIVE_MAX_COUNT 5
+#endif
+
+#ifndef CPPHTTPLIB_CONNECTION_TIMEOUT_SECOND
+#define CPPHTTPLIB_CONNECTION_TIMEOUT_SECOND 300
+#endif
+
+#ifndef CPPHTTPLIB_CONNECTION_TIMEOUT_USECOND
+#define CPPHTTPLIB_CONNECTION_TIMEOUT_USECOND 0
+#endif
+
+#ifndef CPPHTTPLIB_READ_TIMEOUT_SECOND
+#define CPPHTTPLIB_READ_TIMEOUT_SECOND 5
+#endif
+
+#ifndef CPPHTTPLIB_READ_TIMEOUT_USECOND
+#define CPPHTTPLIB_READ_TIMEOUT_USECOND 0
+#endif
+
+#ifndef CPPHTTPLIB_WRITE_TIMEOUT_SECOND
+#define CPPHTTPLIB_WRITE_TIMEOUT_SECOND 5
+#endif
+
+#ifndef CPPHTTPLIB_WRITE_TIMEOUT_USECOND
+#define CPPHTTPLIB_WRITE_TIMEOUT_USECOND 0
+#endif
+
+#ifndef CPPHTTPLIB_IDLE_INTERVAL_SECOND
+#define CPPHTTPLIB_IDLE_INTERVAL_SECOND 0
+#endif
+
+#ifndef CPPHTTPLIB_IDLE_INTERVAL_USECOND
+#ifdef _WIN32
+#define CPPHTTPLIB_IDLE_INTERVAL_USECOND 10000
+#else
+#define CPPHTTPLIB_IDLE_INTERVAL_USECOND 0
+#endif
+#endif
+
+#ifndef CPPHTTPLIB_REQUEST_URI_MAX_LENGTH
+#define CPPHTTPLIB_REQUEST_URI_MAX_LENGTH 8192
+#endif
+
+#ifndef CPPHTTPLIB_HEADER_MAX_LENGTH
+#define CPPHTTPLIB_HEADER_MAX_LENGTH 8192
+#endif
+
+#ifndef CPPHTTPLIB_REDIRECT_MAX_COUNT
+#define CPPHTTPLIB_REDIRECT_MAX_COUNT 20
+#endif
+
+#ifndef CPPHTTPLIB_MULTIPART_FORM_DATA_FILE_MAX_COUNT
+#define CPPHTTPLIB_MULTIPART_FORM_DATA_FILE_MAX_COUNT 1024
+#endif
+
+#ifndef CPPHTTPLIB_PAYLOAD_MAX_LENGTH
+#define CPPHTTPLIB_PAYLOAD_MAX_LENGTH ((std::numeric_limits<size_t>::max)())
+#endif
+
+#ifndef CPPHTTPLIB_FORM_URL_ENCODED_PAYLOAD_MAX_LENGTH
+#define CPPHTTPLIB_FORM_URL_ENCODED_PAYLOAD_MAX_LENGTH 8192
+#endif
+
+#ifndef CPPHTTPLIB_TCP_NODELAY
+#define CPPHTTPLIB_TCP_NODELAY false
+#endif
+
+#ifndef CPPHTTPLIB_RECV_BUFSIZ
+#define CPPHTTPLIB_RECV_BUFSIZ size_t(4096u)
+#endif
+
+#ifndef CPPHTTPLIB_COMPRESSION_BUFSIZ
+#define CPPHTTPLIB_COMPRESSION_BUFSIZ size_t(16384u)
+#endif
+
+#ifndef CPPHTTPLIB_THREAD_POOL_COUNT
+#define CPPHTTPLIB_THREAD_POOL_COUNT \
+ ((std::max)(8u, std::thread::hardware_concurrency() > 0 \
+ ? std::thread::hardware_concurrency() - 1 \
+ : 0))
+#endif
+
+#ifndef CPPHTTPLIB_RECV_FLAGS
+#define CPPHTTPLIB_RECV_FLAGS 0
+#endif
+
+#ifndef CPPHTTPLIB_SEND_FLAGS
+#define CPPHTTPLIB_SEND_FLAGS 0
+#endif
+
+#ifndef CPPHTTPLIB_LISTEN_BACKLOG
+#define CPPHTTPLIB_LISTEN_BACKLOG 5
+#endif
+
+/*
+ * Headers
+ */
+
+#ifdef _WIN32
+#ifndef _CRT_SECURE_NO_WARNINGS
+#define _CRT_SECURE_NO_WARNINGS
+#endif //_CRT_SECURE_NO_WARNINGS
+
+#ifndef _CRT_NONSTDC_NO_DEPRECATE
+#define _CRT_NONSTDC_NO_DEPRECATE
+#endif //_CRT_NONSTDC_NO_DEPRECATE
+
+#if defined(_MSC_VER)
+#if _MSC_VER < 1900
+#error Sorry, Visual Studio versions prior to 2015 are not supported
+#endif
+
+#pragma comment(lib, "ws2_32.lib")
+
+#ifdef _WIN64
+using ssize_t = __int64;
+#else
+using ssize_t = long;
+#endif
+#endif // _MSC_VER
+
+#ifndef S_ISREG
+#define S_ISREG(m) (((m)&S_IFREG) == S_IFREG)
+#endif // S_ISREG
+
+#ifndef S_ISDIR
+#define S_ISDIR(m) (((m)&S_IFDIR) == S_IFDIR)
+#endif // S_ISDIR
+
+#ifndef NOMINMAX
+#define NOMINMAX
+#endif // NOMINMAX
+
+#include <io.h>
+#include <winsock2.h>
+#include <ws2tcpip.h>
+
+#ifndef WSA_FLAG_NO_HANDLE_INHERIT
+#define WSA_FLAG_NO_HANDLE_INHERIT 0x80
+#endif
+
+#ifndef strcasecmp
+#define strcasecmp _stricmp
+#endif // strcasecmp
+
+using socket_t = SOCKET;
+#ifdef CPPHTTPLIB_USE_POLL
+#define poll(fds, nfds, timeout) WSAPoll(fds, nfds, timeout)
+#endif
+
+#else // not _WIN32
+
+#include <arpa/inet.h>
+#ifndef _AIX
+#include <ifaddrs.h>
+#endif
+#include <net/if.h>
+#include <netdb.h>
+#include <netinet/in.h>
+#ifdef __linux__
+#include <resolv.h>
+#endif
+#include <netinet/tcp.h>
+#ifdef CPPHTTPLIB_USE_POLL
+#include <poll.h>
+#endif
+#include <csignal>
+#include <pthread.h>
+#include <sys/select.h>
+#include <sys/socket.h>
+#include <sys/un.h>
+#include <unistd.h>
+
+using socket_t = int;
+#ifndef INVALID_SOCKET
+#define INVALID_SOCKET (-1)
+#endif
+#endif //_WIN32
+
+#include <algorithm>
+#include <array>
+#include <atomic>
+#include <cassert>
+#include <cctype>
+#include <climits>
+#include <condition_variable>
+#include <cstring>
+#include <errno.h>
+#include <exception>
+#include <fcntl.h>
+#include <fstream>
+#include <functional>
+#include <iomanip>
+#include <iostream>
+#include <list>