diff --git a/.data/Izmenjen-clan-17-Pravilnika-o-sadrzini-nacinu-i-postupku-izrade-i-nacin-vrsenja-kontrole-tehnicke-dokumentacije-prema-klasi-i-nameni-objekata.pdf b/.data/Izmenjen-clan-17-Pravilnika-o-sadrzini-nacinu-i-postupku-izrade-i-nacin-vrsenja-kontrole-tehnicke-dokumentacije-prema-klasi-i-nameni-objekata.pdf
new file mode 100644
index 000000000..98fe84f6f
Binary files /dev/null and b/.data/Izmenjen-clan-17-Pravilnika-o-sadrzini-nacinu-i-postupku-izrade-i-nacin-vrsenja-kontrole-tehnicke-dokumentacije-prema-klasi-i-nameni-objekata.pdf differ
diff --git a/.data/Pravilnik-o-energetskoj-efikasnosti-zgrada.pdf b/.data/Pravilnik-o-energetskoj-efikasnosti-zgrada.pdf
new file mode 100644
index 000000000..0e7461979
Binary files /dev/null and b/.data/Pravilnik-o-energetskoj-efikasnosti-zgrada.pdf differ
diff --git a/.data/Pravilnik-o-izgledu-sadrzini-i-mestu-postavljanja-gradilisne-table.pdf b/.data/Pravilnik-o-izgledu-sadrzini-i-mestu-postavljanja-gradilisne-table.pdf
new file mode 100644
index 000000000..7e5990f4d
Binary files /dev/null and b/.data/Pravilnik-o-izgledu-sadrzini-i-mestu-postavljanja-gradilisne-table.pdf differ
diff --git a/.data/Pravilnik-o-katastarskom-premeru-i-katastru-nepokretnosti.pdf b/.data/Pravilnik-o-katastarskom-premeru-i-katastru-nepokretnosti.pdf
new file mode 100644
index 000000000..fc1a11de2
Binary files /dev/null and b/.data/Pravilnik-o-katastarskom-premeru-i-katastru-nepokretnosti.pdf differ
diff --git a/.data/Pravilnik-o-nacinu-zatvaranja-i-obelezavanju-zatvorenog-gradilista.pdf b/.data/Pravilnik-o-nacinu-zatvaranja-i-obelezavanju-zatvorenog-gradilista.pdf
new file mode 100644
index 000000000..3c91de12e
Binary files /dev/null and b/.data/Pravilnik-o-nacinu-zatvaranja-i-obelezavanju-zatvorenog-gradilista.pdf differ
diff --git a/.data/Pravilnik-o-objektima-na-koje-se-ne-primenjuju-pojedine-odredbe-Zakona-o-planiranju-i-izgradnji.pdf b/.data/Pravilnik-o-objektima-na-koje-se-ne-primenjuju-pojedine-odredbe-Zakona-o-planiranju-i-izgradnji.pdf
new file mode 100644
index 000000000..f13c28dff
Binary files /dev/null and b/.data/Pravilnik-o-objektima-na-koje-se-ne-primenjuju-pojedine-odredbe-Zakona-o-planiranju-i-izgradnji.pdf differ
diff --git a/.data/Pravilnik-o-postupku-sprovodjenja-objedinjene-1.pdf b/.data/Pravilnik-o-postupku-sprovodjenja-objedinjene-1.pdf
new file mode 100644
index 000000000..8fe7277de
Binary files /dev/null and b/.data/Pravilnik-o-postupku-sprovodjenja-objedinjene-1.pdf differ
diff --git a/.data/Pravilnik-o-sadrzini-i-nacinu-vrsenja-tehnickog-pregleda-objekta.pdf b/.data/Pravilnik-o-sadrzini-i-nacinu-vrsenja-tehnickog-pregleda-objekta.pdf
new file mode 100644
index 000000000..f3493610b
Binary files /dev/null and b/.data/Pravilnik-o-sadrzini-i-nacinu-vrsenja-tehnickog-pregleda-objekta.pdf differ
diff --git a/.data/Pravilnik-o-sadrzini-nacinu-i-postupku-izrade-i-nacinu-vrsenja-kontrole-tehnicke-dokumentacije-prema-klasi-i-nameni-objekata-2018.pdf b/.data/Pravilnik-o-sadrzini-nacinu-i-postupku-izrade-i-nacinu-vrsenja-kontrole-tehnicke-dokumentacije-prema-klasi-i-nameni-objekata-2018.pdf
new file mode 100644
index 000000000..8b713a87a
Binary files /dev/null and b/.data/Pravilnik-o-sadrzini-nacinu-i-postupku-izrade-i-nacinu-vrsenja-kontrole-tehnicke-dokumentacije-prema-klasi-i-nameni-objekata-2018.pdf differ
diff --git a/.data/Pravilnik-o-sadrzini-postupku-i-nacinu-donosenja-programa-uredjivanja-gradjevinskog-zemljista.pdf b/.data/Pravilnik-o-sadrzini-postupku-i-nacinu-donosenja-programa-uredjivanja-gradjevinskog-zemljista.pdf
new file mode 100644
index 000000000..bac4ab2a4
Binary files /dev/null and b/.data/Pravilnik-o-sadrzini-postupku-i-nacinu-donosenja-programa-uredjivanja-gradjevinskog-zemljista.pdf differ
diff --git a/.data/Pravilnik-o-uslovima-i-normativima-za-projektovanje-stambenih-zgrada-i-stanova.pdf b/.data/Pravilnik-o-uslovima-i-normativima-za-projektovanje-stambenih-zgrada-i-stanova.pdf
new file mode 100644
index 000000000..a5640f407
Binary files /dev/null and b/.data/Pravilnik-o-uslovima-i-normativima-za-projektovanje-stambenih-zgrada-i-stanova.pdf differ
diff --git a/.data/Pravilnik-o-uslovima-osiguranja-od-profesionalne-odgovornosti.pdf b/.data/Pravilnik-o-uslovima-osiguranja-od-profesionalne-odgovornosti.pdf
new file mode 100644
index 000000000..ac2118b92
Binary files /dev/null and b/.data/Pravilnik-o-uslovima-osiguranja-od-profesionalne-odgovornosti.pdf differ
diff --git a/.data/Pravilnik-o-uslovima-sadrzini-i-nacinu-izdavanja-sertifikata-o-energetskim-svojstvima-zgrada.pdf b/.data/Pravilnik-o-uslovima-sadrzini-i-nacinu-izdavanja-sertifikata-o-energetskim-svojstvima-zgrada.pdf
new file mode 100644
index 000000000..3cf99c462
Binary files /dev/null and b/.data/Pravilnik-o-uslovima-sadrzini-i-nacinu-izdavanja-sertifikata-o-energetskim-svojstvima-zgrada.pdf differ
diff --git a/level_4/.env.template b/.env.template
similarity index 66%
rename from level_4/.env.template
rename to .env.template
index bfa1122b3..5dbb7eab3 100644
--- a/level_4/.env.template
+++ b/.env.template
@@ -1,7 +1,7 @@
 OPENAI_API_KEY=sk
 WEAVIATE_URL = 
 WEAVIATE_API_KEY = 
-ENVIRONMENT = docker
+ENV = docker
 POSTGRES_USER = bla
 POSTGRES_PASSWORD = bla
 POSTGRES_DB = bubu
@@ -9,6 +9,10 @@ POSTGRES_HOST = localhost
 POSTGRES_HOST_DOCKER = postgres
 SEGMENT_KEY = Etl4WJwzOkeDPAjaOXOMgyU16hO7mV7B
 COG_ARCH_DIR = cognitive_architecture
- GRAPH_DB_URL = 
- GRAPH_DB_PW = 
- GRAPH_DB_USER = 
+GRAPH_DB_URL = 
+GRAPH_DB_PW = 
+GRAPH_DB_USER = 
+AWS_ACCESS_KEY_ID = 
+AWS_SECRET_ACCESS_KEY = 
+QDRANT_API_KEY
+QDRANT_API_URL
\ No newline at end of file
diff --git a/.github/actions/image_builder/action.yaml b/.github/actions/image_builder/action.yaml
index 607b850af..ad516ca1d 100644
--- a/.github/actions/image_builder/action.yaml
+++ b/.github/actions/image_builder/action.yaml
@@ -33,4 +33,3 @@ runs:
       image_name="${{ inputs.ecr_image_repo_name }}" docker_login="true" version="$VERSION" account="${{ inputs.aws_account_id }}" app_dir="$APP_DIR" publish="${{ inputs.should_publish }}" ./bin/dockerize
       echo "Docker tag is: $VERSION"
       echo $VERSION > /tmp/.DOCKER_IMAGE_VERSION
-
diff --git a/.github/workflows/cd.yaml b/.github/workflows/cd.yaml
index 9048afb3c..c42af72b2 100644
--- a/.github/workflows/cd.yaml
+++ b/.github/workflows/cd.yaml
@@ -47,8 +47,8 @@ jobs:
          stage: dev
          aws_account_id: ${{ env.AWS_ACCOUNT_ID_DEV }}
          should_publish: true
-          ecr_image_repo_name: promethai-dev-backend-promethai-backend-memory
-          dockerfile_location: level_2/.
+          ecr_image_repo_name: promethai-dev-backend-promethai-backend
+          dockerfile_location: ./
      - name: Export Docker image tag
        id: export-promethai-docker-tag
        run: |
@@ -58,19 +58,19 @@
  outputs:
    promethai_docker_tag_backend: ${{ steps.export-promethai-docker-tag.outputs.promethai_docker_tag_backend }}
 
-# apply_tf:
-#  name: Trigger terraform apply workflow
-#  runs-on: ubuntu-latest
-#  needs: publish_docker_to_ecr
-#  steps:
-#    - name: TF apply workflow triggers step
-#      uses: actions/github-script@v6
-#      with:
-#        github-token: ${{ secrets.PAT_FOR_CROSS_REPOS_CICD_TRIGGERING }}
-#        script: |
-#          await github.rest.actions.createWorkflowDispatch({
-#            owner: 'topoteretes',
-#            repo: 'PromethAI-Infra',
-#            workflow_id: 'terraform.apply.yml',
-#            ref: 'main'
-#          })
+  apply_tf:
+    name: Trigger terraform apply workflow
+    runs-on: ubuntu-latest
+    needs: publish_docker_to_ecr
+    steps:
+      - name: TF apply workflow triggers step
+        uses: actions/github-script@v6
+        with:
+          github-token: ${{ secrets.PAT_FOR_CROSS_REPOS_CICD_TRIGGERING }}
+          script: |
+            await github.rest.actions.createWorkflowDispatch({
+              owner: 'topoteretes',
+              repo: 'PromethAI-Infra',
+              workflow_id: 'terraform.apply.yml',
+              ref: 'main'
+            })
diff --git a/.github/workflows/cd_prd.yaml b/.github/workflows/cd_prd.yaml
index 5b7f0247a..e73271408 100644
--- a/.github/workflows/cd_prd.yaml
+++ b/.github/workflows/cd_prd.yaml
@@ -23,38 +23,38 @@ jobs:
    steps:
      - name: Take code from repo
        uses: actions/checkout@v3
-#      - name: Set environment variable for stage
-#        id: set-env
-#        run: |
-#          if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
-#            echo "STAGE=prd" >> $GITHUB_ENV
-#            echo "::set-output name=stage::prd"
-#          else
-#            echo "STAGE=dev" >> $GITHUB_ENV
-#            echo "::set-output name=stage::dev"
-#          fi
-#      - name: Use output
-#        run: echo "The stage is ${{ steps.set-env.outputs.stage }}"
-#      - name: Configure AWS credentials
-#        uses: aws-actions/configure-aws-credentials@v1
-#        with:
-#          role-to-assume: ${{ env.AWS_ROLE_DEV_CICD }}
-#          aws-region: eu-west-1
-#      - name: Create Docker image and push to ECR
-#        uses: ./.github/actions/image_builder
-#        id: generate-promethai-docker
-#        with:
-#          stage: prd
-#          aws_account_id: ${{ env.AWS_ACCOUNT_ID_DEV }}
-#          should_publish: true
-#          ecr_image_repo_name: promethai-prd-backend-promethai-backend-memory
-#          dockerfile_location: ./level_2
-#      - name: Export Docker image tag
-#        id: export-promethai-docker-tag
-#        run: |
-#          export DOCKER_TAG=$(cat /tmp/.DOCKER_IMAGE_VERSION)
-#          echo "Docker tag is: $DOCKER_TAG"
-#          echo "promethai_docker_tag_backend=$DOCKER_TAG" >> $GITHUB_OUTPUT
+      - name: Set environment variable for stage
+        id: set-env
+        run: |
+          if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
+            echo "STAGE=prd" >> $GITHUB_ENV
+            echo "::set-output name=stage::prd"
+          else
+            echo "STAGE=dev" >> $GITHUB_ENV
+            echo "::set-output name=stage::dev"
+          fi
+      - name: Use output
+        run: echo "The stage is ${{ steps.set-env.outputs.stage }}"
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v1
+        with:
+          role-to-assume: ${{ env.AWS_ROLE_DEV_CICD }}
+          aws-region: eu-west-1
+      - name: Create Docker image and push to ECR
+        uses: ./.github/actions/image_builder
+        id: generate-promethai-docker
+        with:
+          stage: prd
+          aws_account_id: ${{ env.AWS_ACCOUNT_ID_DEV }}
+          should_publish: true
+          ecr_image_repo_name: promethai-prd-backend-promethai-backend
+          dockerfile_location: ./
+      - name: Export Docker image tag
+        id: export-promethai-docker-tag
+        run: |
+          export DOCKER_TAG=$(cat /tmp/.DOCKER_IMAGE_VERSION)
+          echo "Docker tag is: $DOCKER_TAG"
+          echo "promethai_docker_tag_backend=$DOCKER_TAG" >> $GITHUB_OUTPUT
 
 #  - name: Create Tag and Release
 #    runs-on: ubuntu-latest
@@ -78,22 +78,22 @@ jobs:
 #    with:
 #      tag_name: ${{ steps.bump_version_and_push_tag.outputs.tag }}
 #      release_name: Release ${{ steps.bump_version_and_push_tag.outputs.tag }}
-#  outputs:
-#    promethai_docker_tag_backend: ${{ steps.export-promethai-docker-tag.outputs.promethai_docker_tag_backend }}
+  outputs:
+    promethai_docker_tag_backend: ${{ steps.export-promethai-docker-tag.outputs.promethai_docker_tag_backend }}
 
-# apply_tf:
-#  name: Trigger terraform apply workflow
-#  runs-on: ubuntu-latest
-#  needs: publish_docker_to_ecr
-#  steps:
-#    - name: TF apply workflow triggers step
-#      uses: actions/github-script@v6
-#      with:
-#        github-token: ${{ secrets.PAT_FOR_CROSS_REPOS_CICD_TRIGGERING }}
-#        script: |
-#          await github.rest.actions.createWorkflowDispatch({
-#            owner: 'topoteretes',
-#            repo: 'PromethAI-Infra',
-#            workflow_id: 'terraform.apply.yml',
-#            ref: 'main'
-#          })
+  apply_tf:
+    name: Trigger terraform apply workflow
+    runs-on: ubuntu-latest
+    needs: publish_docker_to_ecr
+    steps:
+      - name: TF apply workflow triggers step
+        uses: actions/github-script@v6
+        with:
+          github-token: ${{ secrets.PAT_FOR_CROSS_REPOS_CICD_TRIGGERING }}
+          script: |
+            await github.rest.actions.createWorkflowDispatch({
+              owner: 'topoteretes',
+              repo: 'PromethAI-Infra',
+              workflow_id: 'terraform.apply.yml',
+              ref: 'main'
+            })
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index bc6e7f7ce..dba210ac0 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -20,6 +20,6 @@ jobs:
        export SHA_SHORT="$(git rev-parse --short HEAD)"
        export CUR_DATE="$(date +%Y%m%d%H%M%S)"
        export VERSION="dev-$CUR_DATE-$SHA_SHORT"
-        image_name="backend-memory" docker_login="false" version="$VERSION" account="${{ env.AWS_ACCOUNT_ID_DEV }}" app_dir="level_2" publish="false" ./bin/dockerize
-        export DOCKER_TAG=$(cat level_2/tmp/.DOCKER_IMAGE_VERSION)
+        image_name="backend" docker_login="false" version="$VERSION" account="${{ env.AWS_ACCOUNT_ID_DEV }}" app_dir="backend" publish="false" ./bin/dockerize
+        export DOCKER_TAG=$(cat /tmp/.DOCKER_IMAGE_VERSION)
        echo "Successfully built PromethAI backend Docker tag is: $DOCKER_TAG"
diff --git a/level_4/CONTRIBUTING.md b/CONTRIBUTING.md
similarity index 100%
rename from level_4/CONTRIBUTING.md
rename to CONTRIBUTING.md
diff --git a/level_4/Dockerfile b/Dockerfile
similarity index 97%
rename from level_4/Dockerfile
rename to Dockerfile
index 76d333d49..f8e1163b4 100644
--- a/level_4/Dockerfile
+++ b/Dockerfile
@@ -24,6 +24,7 @@ RUN apt-get update -q && \
    curl \
    zip \
    jq \
+#    libgl1-mesa-glx \
    netcat-traditional && \
    pip install poetry && \
    curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && \
diff --git a/LICENSE b/LICENSE
index 655c4b26f..261eeb9e9 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,21 +1,201 @@
-MIT License
-
-Copyright (c) 2023 Topoteretes
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/README.md b/README.md
index a01aee267..48299b796 100644
--- a/README.md
+++ b/README.md
@@ -122,6 +122,7 @@ Run
 If you are running natively, change ENVIRONMENT to local in the .env file
 If you are running in docker, change ENVIRONMENT to postgres in the .env file
 
+Run
 ```
 python main.py
 ```
diff --git a/level_4/__init__.py b/__init__.py
similarity index 100%
rename from level_4/__init__.py
rename to __init__.py
diff --git a/api.py b/api.py
new file mode 100644
index 000000000..9e0181276
--- /dev/null
+++ b/api.py
@@ -0,0 +1,342 @@
+import json
+import logging
+import os
+from enum import Enum
+from typing import Dict, Any
+
+import uvicorn
+from fastapi import FastAPI, BackgroundTasks, HTTPException
+from fastapi.responses import JSONResponse
+from pydantic import BaseModel
+
+from cognitive_architecture.database.postgres.database import AsyncSessionLocal
+from cognitive_architecture.database.postgres.database_crud import session_scope
+from cognitive_architecture.vectorstore_manager import Memory
+from dotenv import load_dotenv
+from main import add_documents_to_graph_db, user_context_enrichment
+from cognitive_architecture.config import Config
+
+# Set up logging
+logging.basicConfig(
+    level=logging.INFO,  # Set the logging level (e.g., DEBUG, INFO, WARNING, ERROR, CRITICAL)
+    format="%(asctime)s [%(levelname)s] %(message)s",  # Set the log message format
+)
+
+logger = logging.getLogger(__name__)
+
+
+load_dotenv()
+OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
+app = FastAPI(debug=True)
+#
+# from auth.cognito.JWTBearer import JWTBearer
+# from auth.auth import jwks
+#
+# auth = JWTBearer(jwks)
+
+from fastapi import Depends
+
+
+config = Config()
+config.load()
+
+class ImageResponse(BaseModel):
+    success: bool
+    message: str
+
+
+@app.get(
+    "/",
+)
+async def root():
+    """
+    Root endpoint that returns a welcome message.
+    """
+    return {"message": "Hello, World, I am alive!"}
+
+
+@app.get("/health")
+def health_check():
+    """
+    Health check endpoint that returns the server status.
+ """ + return {"status": "OK"} + + +class Payload(BaseModel): + payload: Dict[str, Any] + +@app.post("/add-memory", response_model=dict) +async def add_memory( + payload: Payload, + # files: List[UploadFile] = File(...), +): + try: + logging.info(" Adding to Memory ") + decoded_payload = payload.payload + async with session_scope(session=AsyncSessionLocal()) as session: + from main import load_documents_to_vectorstore + + if 'settings' in decoded_payload and decoded_payload['settings'] is not None: + settings_for_loader = decoded_payload['settings'] + else: + settings_for_loader = None + + if 'content' in decoded_payload and decoded_payload['content'] is not None: + content = decoded_payload['content'] + else: + content = None + + output = await load_documents_to_vectorstore(session, decoded_payload['user_id'], content=content, loader_settings=settings_for_loader) + return JSONResponse(content={"response": output}, status_code=200) + + except Exception as e: + return JSONResponse( + content={"response": {"error": str(e)}}, status_code=503 + ) + +@app.post("/add-architecture-public-memory", response_model=dict) +async def add_memory( + payload: Payload, + # files: List[UploadFile] = File(...), +): + try: + logging.info(" Adding to Memory ") + decoded_payload = payload.payload + async with session_scope(session=AsyncSessionLocal()) as session: + from main import load_documents_to_vectorstore + if 'content' in decoded_payload and decoded_payload['content'] is not None: + content = decoded_payload['content'] + else: + content = None + + user_id = 'system_user' + loader_settings = { + "format": "PDF", + "source": "DEVICE", + "path": [".data"] + } + + output = await load_documents_to_vectorstore(session, user_id=user_id, content=content, loader_settings=loader_settings) + return JSONResponse(content={"response": output}, status_code=200) + + except Exception as e: + return JSONResponse( + content={"response": {"error": str(e)}}, status_code=503 + ) + +@app.post("/user-query-to-graph") +async def user_query_to_graph(payload: Payload): + try: + from main import user_query_to_graph_db + decoded_payload = payload.payload + # Execute the query - replace this with the actual execution method + async with session_scope(session=AsyncSessionLocal()) as session: + # Assuming you have a method in Neo4jGraphDB to execute the query + result = await user_query_to_graph_db(session= session, user_id= decoded_payload['user_id'],query_input =decoded_payload['query']) + + return result + + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@app.post("/document-to-graph-db") +async def document_to_graph_db(payload: Payload): + logging.info("Adding documents to graph db") + try: + decoded_payload = payload.payload + if 'settings' in decoded_payload and decoded_payload['settings'] is not None: + settings_for_loader = decoded_payload['settings'] + else: + settings_for_loader = None + if 'memory_type' in decoded_payload and decoded_payload['memory_type'] is not None: + memory_type = decoded_payload['memory_type'] + else: + memory_type = None + async with session_scope(session=AsyncSessionLocal()) as session: + result = await add_documents_to_graph_db(session =session, user_id = decoded_payload['user_id'], document_memory_types =memory_type) + return result + + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +@app.post("/cognitive-context-enrichment") +async def cognitive_context_enrichment(payload: Payload): + try: + decoded_payload = payload.payload + 
async with session_scope(session=AsyncSessionLocal()) as session: + result = await user_context_enrichment(session, user_id = decoded_payload['user_id'], query= decoded_payload['query'], generative_response=decoded_payload['generative_response'], memory_type= decoded_payload['memory_type']) + return JSONResponse(content={"response": result}, status_code=200) + + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@app.post("/classify-user-query") +async def classify_user_query(payload: Payload): + try: + decoded_payload = payload.payload + async with session_scope(session=AsyncSessionLocal()) as session: + from main import relevance_feedback + result = await relevance_feedback( query= decoded_payload['query'], input_type=decoded_payload['knowledge_type']) + return JSONResponse(content={"response": result}, status_code=200) + + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@app.post("/user-query-classifier") +async def user_query_classfier(payload: Payload): + try: + decoded_payload = payload.payload + + # Execute the query - replace this with the actual execution method + async with session_scope(session=AsyncSessionLocal()) as session: + from cognitive_architecture.classifiers.classifier import classify_user_query + # Assuming you have a method in Neo4jGraphDB to execute the query + result = await classify_user_query(session, decoded_payload['user_id'], decoded_payload['query']) + return JSONResponse(content={"response": result}, status_code=200) + + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + + +@app.post("/drop-db") +async def drop_db(payload: Payload): + try: + decoded_payload = payload.payload + + if decoded_payload['operation'] == 'drop': + + if os.environ.get('AWS_ENV') == 'dev': + host = os.environ.get('POSTGRES_HOST') + username = os.environ.get('POSTGRES_USER') + password = os.environ.get('POSTGRES_PASSWORD') + database_name = os.environ.get('POSTGRES_DB') + else: + pass + + from cognitive_architecture.database.create_database_tst import drop_database, create_admin_engine + + engine = create_admin_engine(username, password, host, database_name) + drop_database(engine) + return JSONResponse(content={"response": "DB dropped"}, status_code=200) + else: + + if os.environ.get('AWS_ENV') == 'dev': + host = os.environ.get('POSTGRES_HOST') + username = os.environ.get('POSTGRES_USER') + password = os.environ.get('POSTGRES_PASSWORD') + database_name = os.environ.get('POSTGRES_DB') + else: + pass + + from cognitive_architecture.database.create_database_tst import create_database, create_admin_engine + + engine = create_admin_engine(username, password, host, database_name) + create_database(engine) + return JSONResponse(content={"response": " DB created"}, status_code=200) + + + + except Exception as e: + return HTTPException(status_code=500, detail=str(e)) + + +@app.post("/create-public-memory") +async def create_public_memory(payload: Payload): + try: + decoded_payload = payload.payload + + if 'user_id' in decoded_payload and decoded_payload['user_id'] is not None: + user_id = decoded_payload['user_id'] + else: + user_id = None + + if 'labels' in decoded_payload and decoded_payload['labels'] is not None: + labels = decoded_payload['labels'] + else: + labels = None + + if 'topic' in decoded_payload and decoded_payload['topic'] is not None: + topic = decoded_payload['topic'] + else: + topic = None + + # Execute the query - replace this with the actual execution method + # async with 
session_scope(session=AsyncSessionLocal()) as session: + # from main import create_public_memory + # Assuming you have a method in Neo4jGraphDB to execute the query + result = await create_public_memory(user_id=user_id, labels=labels, topic=topic) + return JSONResponse(content={"response": result}, status_code=200) + + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@app.post("/attach-user-to-public-memory") +async def attach_user_to_public_memory(payload: Payload): + try: + decoded_payload = payload.payload + + if 'topic' in decoded_payload and decoded_payload['topic'] is not None: + topic = decoded_payload['topic'] + else: + topic = None + if 'labels' in decoded_payload and decoded_payload['labels'] is not None: + labels = decoded_payload['labels'] + else: + labels = ['sr'] + + # Execute the query - replace this with the actual execution method + async with session_scope(session=AsyncSessionLocal()) as session: + from main import attach_user_to_memory, create_public_memory + # Assuming you have a method in Neo4jGraphDB to execute the query + await create_public_memory(user_id=decoded_payload['user_id'], topic=topic, labels=labels) + result = await attach_user_to_memory( user_id = decoded_payload['user_id'], topic=topic, labels=labels) + return JSONResponse(content={"response": result}, status_code=200) + + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@app.post("/unlink-user-from-public-memory") +async def unlink_user_from_public_memory(payload: Payload): + try: + decoded_payload = payload.payload + + if 'topic' in decoded_payload and decoded_payload['topic'] is not None: + topic = decoded_payload['topic'] + else: + topic = None + + # Execute the query - replace this with the actual execution method + async with session_scope(session=AsyncSessionLocal()) as session: + from main import unlink_user_from_memory + # Assuming you have a method in Neo4jGraphDB to execute the query + result = await unlink_user_from_memory( user_id = decoded_payload['user_id'], topic=topic, labels=decoded_payload['labels']) + return JSONResponse(content={"response": result}, status_code=200) + + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +def start_api_server(host: str = "0.0.0.0", port: int = 8000): + """ + Start the API server using uvicorn. + + Parameters: + host (str): The host for the server. + port (int): The port for the server. + """ + try: + logger.info(f"Starting server at {host}:{port}") + uvicorn.run(app, host=host, port=port) + except Exception as e: + logger.exception(f"Failed to start server: {e}") + # Here you could add any cleanup code or error recovery code. 
+ + +if __name__ == "__main__": + start_api_server() diff --git a/Dashboard_example.png b/assets/Dashboard_example.png similarity index 100% rename from Dashboard_example.png rename to assets/Dashboard_example.png diff --git a/level_4/User_graph.png b/assets/User_graph.png similarity index 100% rename from level_4/User_graph.png rename to assets/User_graph.png diff --git a/infographic_final.png b/assets/infographic_final.png similarity index 100% rename from infographic_final.png rename to assets/infographic_final.png diff --git a/bin/dockerize b/bin/dockerize index a8f465360..30cfede3c 100755 --- a/bin/dockerize +++ b/bin/dockerize @@ -8,11 +8,11 @@ STAGE=${stage:-"dev"} SHA_SHORT="$(git rev-parse --short HEAD)" CUR_DATE="$(date +%Y%m%d%H%M%S)" VERSION="$STAGE-$CUR_DATE-$SHA_SHORT" -IMAGE_NAME=${image_name:-promethai-${STAGE}-promethai-backend-memory} +IMAGE_NAME=${image_name:-promethai-${STAGE}-promethai-backend} REPO_NAME="${AWS_REPOSITORY}/${IMAGE_NAME}" FULL_IMAGE_NAME="${REPO_NAME}:${VERSION}" -APP_DIR=${app_dir:-"level_2/"} # Updated this line +APP_DIR=${app_dir:-"."} PUBLISH=${publish:-false} @@ -33,4 +33,4 @@ if [ "${PUBLISH}" = true ]; then fi docker push "${FULL_IMAGE_NAME}" && echo "Successfully pushed docker image ${FULL_IMAGE_NAME} to ECR repository" -fi +fi \ No newline at end of file diff --git a/level_4/cognitive_architecture/__init__.py b/cognitive_architecture/__init__.py similarity index 100% rename from level_4/cognitive_architecture/__init__.py rename to cognitive_architecture/__init__.py diff --git a/level_4/cognitive_architecture/classifiers/__init__.py b/cognitive_architecture/classifiers/__init__.py similarity index 100% rename from level_4/cognitive_architecture/classifiers/__init__.py rename to cognitive_architecture/classifiers/__init__.py diff --git a/level_4/cognitive_architecture/classifiers/classifier.py b/cognitive_architecture/classifiers/classifier.py similarity index 67% rename from level_4/cognitive_architecture/classifiers/classifier.py rename to cognitive_architecture/classifiers/classifier.py index bd337b7b0..d0025fa72 100644 --- a/level_4/cognitive_architecture/classifiers/classifier.py +++ b/cognitive_architecture/classifiers/classifier.py @@ -72,12 +72,41 @@ def classify_retrieval(): pass +async def classify_user_input(query, input_type): + + llm = ChatOpenAI(temperature=0, model=config.model) + prompt_classify = ChatPromptTemplate.from_template( + """You are a classifier. 
Determine with a True or False if the following input: {query}, is relevant for the following memory category: {input_type}""" + ) + json_structure = [{ + "name": "classifier", + "description": "Classification", + "parameters": { + "type": "object", + "properties": { + "InputClassification": { + "type": "boolean", + "description": "The classification of the input" + } + }, "required": ["InputClassification"] } + }] + chain_filter = prompt_classify | llm.bind(function_call={"name": "classifier"}, functions=json_structure) + classifier_output = await chain_filter.ainvoke({"query": query, "input_type": input_type}) + arguments_str = classifier_output.additional_kwargs['function_call']['arguments'] + logging.info("This is the arguments string %s", arguments_str) + arguments_dict = json.loads(arguments_str) + logging.info("Relevant summary is %s", arguments_dict.get('DocumentSummary', None)) + InputClassification = arguments_dict.get('InputClassification', None) + logging.info("This is the classification %s", InputClassification) + return InputClassification + + # classify documents according to type of document -async def classify_call(query, context, document_types): +async def classify_call(query, document_summaries): llm = ChatOpenAI(temperature=0, model=config.model) prompt_classify = ChatPromptTemplate.from_template( - """You are a classifier. Determine what document types are relevant : {query}, Context: {context}, Book_types:{document_types}""" + """You are a classifier. Determine what document are relevant for the given query: {query}, Document summaries and ids:{document_summaries}""" ) json_structure = [{ "name": "classifier", @@ -85,24 +114,29 @@ async def classify_call(query, context, document_types): "parameters": { "type": "object", "properties": { - "DocumentCategory": { + "DocumentSummary": { "type": "string", - "description": "The classification of documents in groups such as legal, medical, etc." + "description": "The summary of the document and the topic it deals with." 
+ }, + "d_id": { + "type": "string", + "description": "The id of the document" } - }, "required": ["DocumentCategory"] } + }, "required": ["DocumentSummary"] } }] chain_filter = prompt_classify | llm.bind(function_call={"name": "classifier"}, functions=json_structure) - classifier_output = await chain_filter.ainvoke({"query": query, "context": context, "document_types": document_types}) + classifier_output = await chain_filter.ainvoke({"query": query, "document_summaries": document_summaries}) arguments_str = classifier_output.additional_kwargs['function_call']['arguments'] print("This is the arguments string", arguments_str) arguments_dict = json.loads(arguments_str) - classfier_value = arguments_dict.get('DocumentCategory', None) + logging.info("Relevant summary is %s", arguments_dict.get('DocumentSummary', None)) + classfier_id = arguments_dict.get('d_id', None) - print("This is the classifier value", classfier_value) + print("This is the classifier id ", classfier_id) - return classfier_value + return classfier_id diff --git a/level_4/cognitive_architecture/config.py b/cognitive_architecture/config.py similarity index 73% rename from level_4/cognitive_architecture/config.py rename to cognitive_architecture/config.py index c2694f637..22e0be9ae 100644 --- a/level_4/cognitive_architecture/config.py +++ b/cognitive_architecture/config.py @@ -31,11 +31,27 @@ class Config: embedding_chunk_size: int = 300 # Database parameters - graph_database_url: str = os.getenv('GRAPH_DB_URL') - graph_database_username: str = os.getenv('GRAPH_DB_USER') - graph_database_password: str = os.getenv('GRAPH_DB_PW') + if os.getenv('ENV') == 'prod' or os.getenv('ENV') == 'dev' or os.getenv('AWS_ENV') == 'dev' or os.getenv('AWS_ENV') == 'prd': + graph_database_url: str = os.getenv('GRAPH_DB_URL_PROD') + graph_database_username: str = os.getenv('GRAPH_DB_USER') + graph_database_password: str = os.getenv('GRAPH_DB_PW') + else: + graph_database_url: str = os.getenv('GRAPH_DB_URL') + graph_database_username: str = os.getenv('GRAPH_DB_USER') + graph_database_password: str = os.getenv('GRAPH_DB_PW') weaviate_url: str = os.getenv('WEAVIATE_URL') weaviate_api_key: str = os.getenv('WEAVIATE_API_KEY') + postgres_user: str = os.getenv('POSTGRES_USER') + postgres_password: str = os.getenv('POSTGRES_PASSWORD') + postgres_db: str = os.getenv('POSTGRES_DB') + if os.getenv('ENV') == 'prod' or os.getenv('ENV') == 'dev' or os.getenv('AWS_ENV') == 'dev' or os.getenv('AWS_ENV') == 'prd': + postgres_host: str = os.getenv('POSTGRES_PROD_HOST') + elif os.getenv('ENV') == 'docker': + postgres_host: str = os.getenv('POSTGRES_HOST_DOCKER') + elif os.getenv('ENV') == 'local': + postgres_host: str = os.getenv('POSTGRES_HOST_LOCAL') + + diff --git a/level_4/cognitive_architecture/database/__init__.py b/cognitive_architecture/database/__init__.py similarity index 100% rename from level_4/cognitive_architecture/database/__init__.py rename to cognitive_architecture/database/__init__.py diff --git a/level_4/cognitive_architecture/database/create_database.py b/cognitive_architecture/database/create_database.py similarity index 56% rename from level_4/cognitive_architecture/database/create_database.py rename to cognitive_architecture/database/create_database.py index 578d80b15..2d0107807 100644 --- a/level_4/cognitive_architecture/database/create_database.py +++ b/cognitive_architecture/database/create_database.py @@ -9,6 +9,10 @@ from postgres.models import sessions from postgres.models import user from postgres.models import docs +# from 
cognitive_architecture.config import Config +# config = Config() +# config.load() + from postgres.database import Base @@ -20,7 +24,24 @@ import os +import os + +if os.environ.get('AWS_ENV') == 'prd' or os.environ.get('AWS_ENV') == 'dev': + host = os.environ.get('POSTGRES_HOST') + username = os.environ.get('POSTGRES_USER') + password = os.environ.get('POSTGRES_PASSWORD') + database_name = os.environ.get('POSTGRES_DB') +elif os.environ.get('AWS_ENV') == 'local': + host = os.environ.get('POSTGRES_HOST') + username = os.environ.get('POSTGRES_USER') + password = os.environ.get('POSTGRES_PASSWORD') + database_name = os.environ.get('POSTGRES_DB') +else: + host = os.environ.get('POSTGRES_HOST') + username = os.environ.get('POSTGRES_USER') + password = os.environ.get('POSTGRES_PASSWORD') + database_name = os.environ.get('POSTGRES_DB') @@ -50,22 +71,32 @@ def create_database(username, password, host, db_name): engine.dispose() +def drop_database(username, password, host, db_name): + engine = create_admin_engine(username, password, host) + connection = engine.raw_connection() + connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) + cursor = connection.cursor() + cursor.execute(f"DROP DATABASE IF EXISTS {db_name}") + cursor.close() + connection.close() + engine.dispose() + print(f"Database {db_name} dropped successfully.") + + + def create_tables(engine): Base.metadata.create_all(bind=engine) if __name__ == "__main__": - username = os.getenv('POSTGRES_USER') - password = os.getenv('POSTGRES_PASSWORD') - database_name = os.getenv('POSTGRES_DB') - environment = os.environ.get("ENVIRONMENT") - - if environment == "local": - host = os.getenv('POSTGRES_HOST') - - elif environment == "docker": - host = os.getenv('POSTGRES_HOST_DOCKER') - else: - host = os.getenv('POSTGRES_HOST_DOCKER') + # + # username = os.getenv('POSTGRES_USER') + # password = os.getenv('POSTGRES_PASSWORD') + # database_name = os.getenv('POSTGRES_DB') + # environment = os.environ.get("ENV") + # host = config.postgres_host + # username = config.postgres_user + # password = config.postgres_password + # database_name = config.postgres_db engine = create_admin_engine(username, password, host, database_name) diff --git a/cognitive_architecture/database/create_database_tst.py b/cognitive_architecture/database/create_database_tst.py new file mode 100644 index 000000000..3af3a42f5 --- /dev/null +++ b/cognitive_architecture/database/create_database_tst.py @@ -0,0 +1,112 @@ +# this is needed to import classes from other modules +# script_dir = os.path.dirname(os.path.abspath(__file__)) +# # Get the parent directory of your script and add it to sys.path +# parent_dir = os.path.dirname(script_dir) +# sys.path.append(parent_dir) +from cognitive_architecture.database.postgres.models import memory +from cognitive_architecture.database.postgres.models import metadatas +from cognitive_architecture.database.postgres.models import operation +from cognitive_architecture.database.postgres.models import sessions +from cognitive_architecture.database.postgres.models import user +from cognitive_architecture.database.postgres.models import docs +# from cognitive_architecture.config import Config +# config = Config() +# config.load() + + + +from postgres.database import Base + +from sqlalchemy import create_engine, text +import psycopg2 +from dotenv import load_dotenv +load_dotenv() +import os + + +import os + + +if os.environ.get('AWS_ENV') == 'prd' or os.environ.get('AWS_ENV') == 'dev': + host = os.environ.get('POSTGRES_HOST') + 
username = os.environ.get('POSTGRES_USER') + password = os.environ.get('POSTGRES_PASSWORD') + database_name = os.environ.get('POSTGRES_DB') +elif os.environ.get('AWS_ENV') == 'local': + host = os.environ.get('POSTGRES_HOST') + username = os.environ.get('POSTGRES_USER') + password = os.environ.get('POSTGRES_PASSWORD') + database_name = os.environ.get('POSTGRES_DB') +else: + host = os.environ.get('POSTGRES_HOST') + username = os.environ.get('POSTGRES_USER') + password = os.environ.get('POSTGRES_PASSWORD') + database_name = os.environ.get('POSTGRES_DB') + + + +def create_admin_engine(username, password, host, database_name): + admin_url = f"postgresql://{username}:{password}@{host}:5432/{database_name}" + return create_engine(admin_url) + + +def database_exists(username, password, host, db_name): + engine = create_admin_engine(username, password, host, db_name) + connection = engine.connect() + query = text(f"SELECT 1 FROM pg_database WHERE datname='{db_name}'") + result = connection.execute(query).fetchone() + connection.close() + engine.dispose() + return result is not None + + +def create_database(username, password, host, db_name): + engine = create_admin_engine(username, password, host, db_name) + connection = engine.raw_connection() + connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) + cursor = connection.cursor() + cursor.execute(f"CREATE DATABASE {db_name}") + cursor.close() + connection.close() + engine.dispose() + + +def drop_database(username, password, host, db_name): + engine = create_admin_engine(username, password, host) + connection = engine.raw_connection() + connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) + cursor = connection.cursor() + cursor.execute(f"DROP DATABASE IF EXISTS {db_name}") + cursor.close() + connection.close() + engine.dispose() + print(f"Database {db_name} dropped successfully.") + + + +def create_tables(engine): + Base.metadata.create_all(bind=engine) + +if __name__ == "__main__": + # + # username = os.getenv('POSTGRES_USER') + # password = os.getenv('POSTGRES_PASSWORD') + # database_name = os.getenv('POSTGRES_DB') + # environment = os.environ.get("ENV") + # host = config.postgres_host + # username = config.postgres_user + # password = config.postgres_password + # database_name = config.postgres_db + + engine = create_admin_engine(username, password, host, database_name) + + print(Base.metadata.tables) + + if not database_exists(username, password, host, database_name): + print(f"Database {database_name} does not exist. 
Creating...") + create_database(username, password, host, database_name) + print(f"Database {database_name} created successfully.") + + create_tables(engine) + + diff --git a/level_4/cognitive_architecture/database/graph_database/__init__.py b/cognitive_architecture/database/graph_database/__init__.py similarity index 100% rename from level_4/cognitive_architecture/database/graph_database/__init__.py rename to cognitive_architecture/database/graph_database/__init__.py diff --git a/cognitive_architecture/database/graph_database/graph.py b/cognitive_architecture/database/graph_database/graph.py new file mode 100644 index 000000000..98f16bb28 --- /dev/null +++ b/cognitive_architecture/database/graph_database/graph.py @@ -0,0 +1,769 @@ + +import logging +import os + +from neo4j import AsyncSession +from neo4j.exceptions import Neo4jError + +print(os.getcwd()) + +import networkx as nx + +from langchain.graphs import Neo4jGraph +import os +from dotenv import load_dotenv + +import openai +import instructor +from openai import OpenAI +from openai import AsyncOpenAI +import pickle + +from abc import ABC, abstractmethod + +# Adds response_model to ChatCompletion +# Allows the return of Pydantic model rather than raw JSON + +from pydantic import BaseModel, Field +from typing import List, Dict, Optional +from ...utils import format_dict, append_uuid_to_variable_names, create_edge_variable_mapping, \ + create_node_variable_mapping, get_unsumarized_vector_db_namespace +from ...llm.queries import generate_summary, generate_graph + +DEFAULT_PRESET = "promethai_chat" +preset_options = [DEFAULT_PRESET] +PROMETHAI_DIR = os.path.join(os.path.expanduser("~"), ".") +load_dotenv() + +OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "") +from ...config import Config + +from ...shared.data_models import Node, Edge, KnowledgeGraph, GraphQLQuery, MemorySummary + +config = Config() +config.load() + +print(config.model) +print(config.openai_key) + +OPENAI_API_KEY = config.openai_key + +aclient = instructor.patch(OpenAI()) + +class AbstractGraphDB(ABC): + + @abstractmethod + def query(self, query: str, params=None): + pass + + # @abstractmethod + # def create_nodes(self, nodes: List[dict]): + # pass + # + # @abstractmethod + # def create_edges(self, edges: List[dict]): + # pass + # + # @abstractmethod + # def create_memory_type_relationships(self, nodes: List[dict], memory_type: str): + # pass + + +class Neo4jGraphDB(AbstractGraphDB): + def __init__(self, url, username, password): + # self.graph = Neo4jGraph(url=url, username=username, password=password) + from neo4j import GraphDatabase + self.driver = GraphDatabase.driver(url, auth=(username, password)) + self.openai_key = config.openai_key + + + + def close(self): + # Method to close the Neo4j driver instance + self.driver.close() + + def query(self, query, params=None): + try: + with self.driver.session() as session: + result = session.run(query, params).data() + return result + except Exception as e: + logging.error(f"An error occurred while executing the query: {e}") + raise e + + + + def create_base_cognitive_architecture(self, user_id: str): + # Create the user and memory components if they don't exist + user_memory_cypher = f""" + MERGE (user:User {{userId: '{user_id}'}}) + MERGE (semantic:SemanticMemory {{description: 'SemanticMemory', userId: '{user_id}' }}) + MERGE (episodic:EpisodicMemory {{description: 'EpisodicMemory' , userId: '{user_id}'}}) + MERGE (buffer:Buffer {{description: 'Buffer' , userId: '{user_id}' }}) + MERGE 
(user)-[:HAS_SEMANTIC_MEMORY]->(semantic) + MERGE (user)-[:HAS_EPISODIC_MEMORY]->(episodic) + MERGE (user)-[:HAS_BUFFER]->(buffer) + """ + return user_memory_cypher + + async def retrieve_memory(self, user_id: str, memory_type: str, timestamp: float = None, summarized: bool = None): + if memory_type == 'SemanticMemory': + relationship = 'SEMANTIC_MEMORY' + memory_rel = 'HAS_KNOWLEDGE' + elif memory_type == 'EpisodicMemory': + relationship = 'EPISODIC_MEMORY' + memory_rel = 'HAS_EVENT' + elif memory_type == 'Buffer': + relationship = 'BUFFER' + memory_rel = 'CURRENTLY_HOLDING' + if timestamp is not None and summarized is not None: + query = f""" + MATCH (user:User {{userId: '{user_id}' }})-[:HAS_{relationship}]->(memory:{memory_type}) + MATCH (memory)-[:{memory_rel}]->(item) + WHERE item.created_at >= {timestamp} AND item.summarized = {str(summarized).lower()} + RETURN item + """ + elif timestamp is not None: + query = f""" + MATCH (user:User {{userId: '{user_id}' }})-[:HAS_{relationship}]->(memory:{memory_type}) + MATCH (memory)-[:{memory_rel}]->(item) + WHERE item.created_at >= {timestamp} + RETURN item + """ + elif summarized is not None: + query = f""" + MATCH (user:User {{userId: '{user_id}' }})-[:HAS_{relationship}]->(memory:{memory_type}) + MATCH (memory)-[:{memory_rel}]->(item) + WHERE item.summarized = {str(summarized).lower()} + RETURN item + """ + print(query) + else: + query = f""" + MATCH (user:User {{userId: '{user_id}' }})-[:HAS_{relationship}]->(memory:{memory_type}) + MATCH (memory)-[:{memory_rel}]->(item) + RETURN item + """ + output = self.query(query, params={"user_id": user_id}) + print("Here is the output", output) + + reduced_graph = await generate_summary(input = output) + return reduced_graph + + + def cypher_statement_correcting(self, input: str) ->str: + return aclient.chat.completions.create( + model=config.model, + messages=[ + { + "role": "user", + "content": f"""Check the cypher query for syntax issues, and fix any if found and return it as is: {input}. """, + + }, + {"role": "system", "content": """You are a top-tier algorithm + designed for checking cypher queries for neo4j graph databases. 
You have to return input provided to you as is"""} + ], + response_model=GraphQLQuery, + ) + + def generate_create_statements_for_nodes_with_uuid(self, nodes, unique_mapping, base_node_mapping): + create_statements = [] + for node in nodes: + original_variable_name = base_node_mapping[node['id']] + unique_variable_name = unique_mapping[original_variable_name] + node_label = node['category'].capitalize() + properties = {k: v for k, v in node.items() if k not in ['id', 'category']} + try: + properties = format_dict(properties) + except: + pass + create_statements.append(f"CREATE ({unique_variable_name}:{node_label} {properties})") + return create_statements + + # Update the function to generate Cypher CREATE statements for edges with unique variable names + def generate_create_statements_for_edges_with_uuid(self, user_id, edges, unique_mapping, base_node_mapping): + create_statements = [] + with_statement = f"WITH {', '.join(unique_mapping.values())}, user , semantic, episodic, buffer" + create_statements.append(with_statement) + + for edge in edges: + # print("HERE IS THE EDGE", edge) + source_variable = unique_mapping[base_node_mapping[edge['source']]] + target_variable = unique_mapping[base_node_mapping[edge['target']]] + relationship = edge['description'].replace(" ", "_").upper() + create_statements.append(f"CREATE ({source_variable})-[:{relationship}]->({target_variable})") + return create_statements + + def generate_memory_type_relationships_with_uuid_and_time_context(self, user_id, nodes, unique_mapping, base_node_mapping): + create_statements = [] + with_statement = f"WITH {', '.join(unique_mapping.values())}, user, semantic, episodic, buffer" + create_statements.append(with_statement) + + # Loop through each node and create relationships based on memory_type + for node in nodes: + original_variable_name = base_node_mapping[node['id']] + unique_variable_name = unique_mapping[original_variable_name] + if node['memory_type'] == 'semantic': + create_statements.append(f"CREATE (semantic)-[:HAS_KNOWLEDGE]->({unique_variable_name})") + elif node['memory_type'] == 'episodic': + create_statements.append(f"CREATE (episodic)-[:HAS_EVENT]->({unique_variable_name})") + if node['category'] == 'time': + create_statements.append(f"CREATE (buffer)-[:HAS_TIME_CONTEXT]->({unique_variable_name})") + + # Assuming buffer holds all actions and times + # if node['category'] in ['action', 'time']: + create_statements.append(f"CREATE (buffer)-[:CURRENTLY_HOLDING]->({unique_variable_name})") + + return create_statements + + async def generate_cypher_query_for_user_prompt_decomposition(self, user_id:str, query:str): + + graph: KnowledgeGraph = generate_graph(query) + import time + for node in graph.nodes: + node.created_at = time.time() + node.summarized = False + + for edge in graph.edges: + edge.created_at = time.time() + edge.summarized = False + graph_dic = graph.dict() + + node_variable_mapping = create_node_variable_mapping(graph_dic['nodes']) + edge_variable_mapping = create_edge_variable_mapping(graph_dic['edges']) + # Create unique variable names for each node + unique_node_variable_mapping = append_uuid_to_variable_names(node_variable_mapping) + unique_edge_variable_mapping = append_uuid_to_variable_names(edge_variable_mapping) + create_nodes_statements = self.generate_create_statements_for_nodes_with_uuid(graph_dic['nodes'], unique_node_variable_mapping, node_variable_mapping) + create_edges_statements =self.generate_create_statements_for_edges_with_uuid(user_id, graph_dic['edges'], 
unique_node_variable_mapping, node_variable_mapping) + + memory_type_statements_with_uuid_and_time_context = self.generate_memory_type_relationships_with_uuid_and_time_context(user_id, + graph_dic['nodes'], unique_node_variable_mapping, node_variable_mapping) + + # # Combine all statements + cypher_statements = [self.create_base_cognitive_architecture(user_id)] + create_nodes_statements + create_edges_statements + memory_type_statements_with_uuid_and_time_context + cypher_statements_joined = "\n".join(cypher_statements) + logging.info("User Cypher Query raw: %s", cypher_statements_joined) + # corrected_cypher_statements = self.cypher_statement_correcting(input = cypher_statements_joined) + # logging.info("User Cypher Query: %s", corrected_cypher_statements.query) + # return corrected_cypher_statements.query + return cypher_statements_joined + + + def update_user_query_for_user_prompt_decomposition(self, user_id, user_query): + pass + + + def delete_all_user_memories(self, user_id): + try: + # Check if the user exists + user_exists = self.query(f"MATCH (user:User {{userId: '{user_id}'}}) RETURN user") + if not user_exists: + return f"No user found with ID: {user_id}" + + # Delete all memory nodes and relationships for the given user + delete_query = f""" + MATCH (user:User {{userId: '{user_id}'}})-[r]-() + DELETE r + WITH user + MATCH (user)-[:HAS_SEMANTIC_MEMORY]->(semantic) + MATCH (user)-[:HAS_EPISODIC_MEMORY]->(episodic) + MATCH (user)-[:HAS_BUFFER]->(buffer) + DETACH DELETE semantic, episodic, buffer + """ + self.query(delete_query) + return f"All memories deleted for user ID: {user_id}" + except Exception as e: + return f"An error occurred: {str(e)}" + + def delete_specific_memory_type(self, user_id, memory_type): + try: + # Check if the user exists + user_exists = self.query(f"MATCH (user:User {{userId: '{user_id}'}}) RETURN user") + if not user_exists: + return f"No user found with ID: {user_id}" + + # Validate memory type + if memory_type not in ['SemanticMemory', 'EpisodicMemory', 'Buffer']: + return "Invalid memory type. Choose from 'SemanticMemory', 'EpisodicMemory', or 'Buffer'." 
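+
+            # Note: DETACH DELETE below removes the memory node together with all of
+            # its relationships; items it pointed to remain in the graph and must be
+            # removed separately if needed.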
+ + # Delete specific memory type nodes and relationships for the given user + delete_query = f""" + MATCH (user:User {{userId: '{user_id}'}})-[:HAS_{memory_type.upper()}]->(memory) + DETACH DELETE memory + """ + self.query(delete_query) + return f"{memory_type} deleted for user ID: {user_id}" + except Exception as e: + return f"An error occurred: {str(e)}" + + def retrieve_semantic_memory(self, user_id: str, timestamp: float = None, summarized: bool = None): + if timestamp is not None and summarized is not None: + query = f""" + MATCH (user:User {{userId: '{user_id}' }})-[:HAS_SEMANTIC_MEMORY]->(semantic:SemanticMemory) + MATCH (semantic)-[:HAS_KNOWLEDGE]->(knowledge) + WHERE knowledge.created_at >= {timestamp} AND knowledge.summarized = {str(summarized).lower()} + RETURN knowledge + """ + elif timestamp is not None: + query = f""" + MATCH (user:User {{userId: '{user_id}' }})-[:HAS_SEMANTIC_MEMORY]->(semantic:SemanticMemory) + MATCH (semantic)-[:HAS_KNOWLEDGE]->(knowledge) + WHERE knowledge.created_at >= {timestamp} + RETURN knowledge + """ + elif summarized is not None: + query = f""" + MATCH (user:User {{userId: '{user_id}' }})-[:HAS_SEMANTIC_MEMORY]->(semantic:SemanticMemory) + MATCH (semantic)-[:HAS_KNOWLEDGE]->(knowledge) + WHERE knowledge.summarized = {str(summarized).lower()} + RETURN knowledge + """ + else: + query = f""" + MATCH (user:User {{userId: '{user_id}' }})-[:HAS_SEMANTIC_MEMORY]->(semantic:SemanticMemory) + MATCH (semantic)-[:HAS_KNOWLEDGE]->(knowledge) + RETURN knowledge + """ + return self.query(query, params={"user_id": user_id}) + + def retrieve_episodic_memory(self, user_id: str, timestamp: float = None, summarized: bool = None): + if timestamp is not None and summarized is not None: + query = f""" + MATCH (user:User {{userId: '{user_id}' }})-[:HAS_EPISODIC_MEMORY]->(episodic:EpisodicMemory) + MATCH (episodic)-[:HAS_EVENT]->(event) + WHERE event.created_at >= {timestamp} AND event.summarized = {str(summarized).lower()} + RETURN event + """ + elif timestamp is not None: + query = f""" + MATCH (user:User {{userId: '{user_id}' }})-[:HAS_EPISODIC_MEMORY]->(episodic:EpisodicMemory) + MATCH (episodic)-[:HAS_EVENT]->(event) + WHERE event.created_at >= {timestamp} + RETURN event + """ + elif summarized is not None: + query = f""" + MATCH (user:User {{userId: '{user_id}' }})-[:HAS_EPISODIC_MEMORY]->(episodic:EpisodicMemory) + MATCH (episodic)-[:HAS_EVENT]->(event) + WHERE event.summarized = {str(summarized).lower()} + RETURN event + """ + else: + query = f""" + MATCH (user:User {{userId: '{user_id}' }})-[:HAS_EPISODIC_MEMORY]->(episodic:EpisodicMemory) + MATCH (episodic)-[:HAS_EVENT]->(event) + RETURN event + """ + return self.query(query, params={"user_id": user_id}) + + + def retrieve_buffer_memory(self, user_id: str, timestamp: float = None, summarized: bool = None): + if timestamp is not None and summarized is not None: + query = f""" + MATCH (user:User {{userId: '{user_id}' }})-[:HAS_BUFFER]->(buffer:Buffer) + MATCH (buffer)-[:CURRENTLY_HOLDING]->(item) + WHERE item.created_at >= {timestamp} AND item.summarized = {str(summarized).lower()} + RETURN item + """ + elif timestamp is not None: + query = f""" + MATCH (user:User {{userId: '{user_id}' }})-[:HAS_BUFFER]->(buffer:Buffer) + MATCH (buffer)-[:CURRENTLY_HOLDING]->(item) + WHERE item.created_at >= {timestamp} + RETURN item + """ + elif summarized is not None: + query = f""" + MATCH (user:User {{userId: '{user_id}' }})-[:HAS_BUFFER]->(buffer:Buffer) + MATCH (buffer)-[:CURRENTLY_HOLDING]->(item) + WHERE item.summarized = 
{str(summarized).lower()}
+            RETURN item
+            """
+        else:
+            query = f"""
+            MATCH (user:User {{userId: '{user_id}' }})-[:HAS_BUFFER]->(buffer:Buffer)
+            MATCH (buffer)-[:CURRENTLY_HOLDING]->(item)
+            RETURN item
+            """
+        return self.query(query, params={"user_id": user_id})
+
+    def retrieve_public_memory(self, user_id: str):
+        query = """
+        MATCH (user:User {userId: $user_id})-[:HAS_PUBLIC_MEMORY]->(public:PublicMemory)
+        MATCH (public)-[:HAS_DOCUMENT]->(document)
+        RETURN document
+        """
+        return self.query(query, params={"user_id": user_id})
+
+    def generate_graph_semantic_memory_document_summary(self, document_summary: str, unique_graphdb_mapping_values: dict, document_namespace: str):
+        """ This function takes a document and generates a document summary in Semantic Memory"""
+        create_statements = []
+        with_statement = f"WITH {', '.join(unique_graphdb_mapping_values.values())}, user, semantic, episodic, buffer"
+        create_statements.append(with_statement)
+
+        # Loop through each node variable and link it to semantic memory
+        for node_variable in unique_graphdb_mapping_values.values():
+            create_statements.append(f"CREATE (semantic)-[:HAS_KNOWLEDGE]->({node_variable})")
+
+        return create_statements
+
+    def generate_document_summary(self, document_summary: str, unique_graphdb_mapping_values: dict, document_namespace: str):
+        """ This function takes a document and generates a document summary in Semantic Memory"""
+
+        # fetch namespace from postgres db
+        # fetch 1st and last page from vector store
+        # summarize the text, add document type
+        # write to postgres
+        create_statements = []
+        with_statement = f"WITH {', '.join(unique_graphdb_mapping_values.values())}, user, semantic, episodic, buffer"
+        create_statements.append(with_statement)
+
+        # Loop through each node variable and link it to semantic memory
+        for node_variable in unique_graphdb_mapping_values.values():
+            create_statements.append(f"CREATE (semantic)-[:HAS_KNOWLEDGE]->({node_variable})")
+
+        return create_statements
+
+    async def get_memory_linked_document_summaries(self, user_id: str, memory_type: str = "PublicMemory"):
+        """
+        Retrieve a list of summaries for all documents associated with a given memory type for a user.
+
+        Args:
+            user_id (str): The unique identifier of the user.
+            memory_type (str): The type of memory node ('SemanticMemory' or 'PublicMemory').
+
+        Returns:
+            List[Dict[str, Union[str, None]]]: A list of dictionaries containing document summary and d_id.
+
+        Raises:
+            Exception: If an error occurs during the database query execution.
+        """
+        if memory_type == "PublicMemory":
+            relationship = "HAS_PUBLIC_MEMORY"
+        elif memory_type == "SemanticMemory":
+            relationship = "HAS_SEMANTIC_MEMORY"
+        else:
+            raise ValueError(f"Unsupported memory type: {memory_type}")
+        try:
+            query = f'''
+            MATCH (user:User {{userId: '{user_id}'}})-[:{relationship}]->(memory:{memory_type})-[:HAS_DOCUMENT]->(document:Document)
+            RETURN document.d_id AS d_id, document.summary AS summary
+            '''
+            logging.info(f"Generated Cypher query: {query}")
+            result = self.query(query)
+            logging.info(f"Result: {result}")
+            return [{"d_id": record.get("d_id", None), "summary": record.get("summary", "No summary available")} for
+                    record in result]
+        except Exception as e:
+            logging.error(f"An error occurred while retrieving document summary: {str(e)}")
+            return None
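+
+    # Illustrative result shape, assuming two ingested documents:
+    #     [{"d_id": "doc-1", "summary": "..."}, {"d_id": "doc-2", "summary": "..."}]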
+
+    async def get_memory_linked_document_ids(self, user_id: str, summary_id: str, memory_type: str = "PublicMemory"):
+        """
+        Retrieve a list of document IDs for a specific category associated with a given memory type for a user.
+
+        Args:
+            user_id (str): The unique identifier of the user.
+            summary_id (str): The specific document summary id to filter by.
+            memory_type (str): The type of memory node ('SemanticMemory' or 'PublicMemory').
+
+        Returns:
+            List[str]: A list of document IDs in the specified category associated with the memory type for the user.
+
+        Raises:
+            Exception: If an error occurs during the database query execution.
+        """
+        if memory_type == "PublicMemory":
+            relationship = "HAS_PUBLIC_MEMORY"
+        elif memory_type == "SemanticMemory":
+            relationship = "HAS_SEMANTIC_MEMORY"
+        else:
+            raise ValueError(f"Unsupported memory type: {memory_type}")
+        try:
+            query = f'''
+            MATCH (user:User {{userId: '{user_id}'}})-[:{relationship}]->(memory:{memory_type})-[:HAS_DOCUMENT]->(document:Document)
+            WHERE document.d_id = '{summary_id}'
+            RETURN document.d_id AS d_id
+            '''
+            logging.info(f"Generated Cypher query: {query}")
+            result = self.query(query)
+            return [record["d_id"] for record in result]
+        except Exception as e:
+            logging.error(f"An error occurred while retrieving document IDs: {str(e)}")
+            return None
+
+    def create_document_node_cypher(self, document_summary: dict, user_id: str,
+                                    memory_type: str = "PublicMemory", public_memory_id: str = None) -> str:
+        """
+        Generate a Cypher query to create a Document node. If the memory type is 'SemanticMemory',
+        link it to a SemanticMemory node for a user. If the memory type is 'PublicMemory',
+        only link the Document node to the PublicMemory node.
+
+        Parameters:
+        - document_summary (dict): A dictionary containing the document's category, title, summary, and document ID.
+        - user_id (str): The unique identifier for the user.
+        - memory_type (str): The type of memory node to link ("SemanticMemory" or "PublicMemory"). Default is "PublicMemory".
+        - public_memory_id (str, optional): The id of the PublicMemory node the document should be linked to.
+
+        Returns:
+        - str: A Cypher query string with parameters.
+
+        Raises:
+        - ValueError: If any required data is missing or invalid.
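+
+        Example (illustrative values):
+            >>> summary = {'DocumentCategory': 'Legal', 'Title': 'Energy Efficiency Rulebook',
+            ...            'Summary': 'Defines energy performance requirements.', 'd_id': 'doc-42'}
+            >>> cypher = graph_db.create_document_node_cypher(summary, user_id='user-1', memory_type='SemanticMemory')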
+ """ + + # Validate the input parameters + if not isinstance(document_summary, dict): + raise ValueError("The document_summary must be a dictionary.") + if not all(key in document_summary for key in ['DocumentCategory', 'Title', 'Summary', 'd_id']): + raise ValueError("The document_summary dictionary is missing required keys.") + if not isinstance(user_id, str) or not user_id: + raise ValueError("The user_id must be a non-empty string.") + if memory_type not in ["SemanticMemory", "PublicMemory"]: + raise ValueError("The memory_type must be either 'Semantic' or 'PublicMemory'.") + + # Escape single quotes in the document summary data + title = document_summary['Title'].replace("'", "\\'") + summary = document_summary['Summary'].replace("'", "\\'") + document_category = document_summary['DocumentCategory'].replace("'", "\\'") + d_id = document_summary['d_id'].replace("'", "\\'") + + memory_node_type = "SemanticMemory" if memory_type == "SemanticMemory" else "PublicMemory" + + user_memory_link = '' + if memory_type == "SemanticMemory": + user_memory_link = f''' + // Ensure the User node exists + MERGE (user:User {{ userId: '{user_id}' }}) + MERGE (memory:SemanticMemory {{ userId: '{user_id}' }}) + MERGE (user)-[:HAS_SEMANTIC_MEMORY]->(memory) + ''' + elif memory_type == "PublicMemory": + logging.info(f"Public memory id: {public_memory_id}") + user_memory_link = f''' + // Merge with the existing PublicMemory node or create a new one if it does not exist + MATCH (memory:PublicMemory {{ memoryId: {public_memory_id} }}) + ''' + + cypher_query = f''' + {user_memory_link} + + // Create the Document node with its properties + CREATE (document:Document {{ + title: '{title}', + summary: '{summary}', + documentCategory: '{document_category}', + d_id: '{d_id}', + created_at: timestamp(), + }}) + + // Link the Document node to the {memory_node_type} node + MERGE (memory)-[:HAS_DOCUMENT]->(document) + ''' + + logging.info(f"Generated Cypher query: {cypher_query}") + + return cypher_query + + def update_document_node_with_db_ids(self, vectordb_namespace: str, document_id: str, user_id: str = None): + """ + Update the namespace of a Document node in the database. The document can be linked + either to a SemanticMemory node (if a user ID is provided) or to a PublicMemory node. + + Parameters: + - vectordb_namespace (str): The namespace to set for the vectordb. + - document_id (str): The unique identifier of the document. + - user_id (str, optional): The unique identifier for the user. Default is None. + + Returns: + - str: A Cypher query string to perform the update. + """ + + if user_id: + # Update for a document linked to a SemanticMemory node + cypher_query = f''' + MATCH (user:User {{userId: '{user_id}' }})-[:HAS_SEMANTIC_MEMORY]->(:SemanticMemory)-[:HAS_DOCUMENT]->(document:Document {{d_id: '{document_id}'}}) + SET document.vectordbNamespace = '{vectordb_namespace}' + RETURN document + ''' + else: + # Update for a document linked to a PublicMemory node + cypher_query = f''' + MATCH (:PublicMemory)-[:HAS_DOCUMENT]->(document:Document {{d_id: '{document_id}'}}) + SET document.vectordbNamespace = '{vectordb_namespace}' + RETURN document + ''' + + return cypher_query + + def get_namespaces_by_document_category(self, user_id: str, category: str): + """ + Retrieve a list of Vectordb namespaces for documents of a specified category associated with a given user. 
+ + This function executes a Cypher query in a Neo4j database to fetch the 'vectordbNamespace' of all 'Document' nodes + that are linked to the 'SemanticMemory' node of the specified user and belong to the specified category. + + Parameters: + - user_id (str): The unique identifier of the user. + - category (str): The category to filter the documents by. + + Returns: + - List[str]: A list of Vectordb namespaces for documents in the specified category. + + Raises: + - Exception: If an error occurs during the database query execution. + """ + try: + query = f''' + MATCH (user:User {{userId: '{user_id}'}})-[:HAS_SEMANTIC_MEMORY]->(semantic:SemanticMemory)-[:HAS_DOCUMENT]->(document:Document) + WHERE document.documentCategory = '{category}' + RETURN document.vectordbNamespace AS namespace + ''' + result = self.query(query) + namespaces = [record["namespace"] for record in result] + return namespaces + except Exception as e: + logging.error(f"An error occurred while retrieving namespaces by document category: {str(e)}") + return None + + async def create_memory_node(self, labels, topic=None): + """ + Create or find a memory node of the specified type with labels and a description. + + Args: + labels (List[str]): A list of labels for the node. + topic (str, optional): The type of memory node to create or find. Defaults to "PublicMemory". + + Returns: + int: The ID of the created or found memory node. + + Raises: + ValueError: If input parameters are invalid. + Neo4jError: If an error occurs during the database operation. + """ + if topic is None: + topic = "PublicMemory" + + # Prepare labels as a string + label_list = ', '.join(f"'{label}'" for label in labels) + + # Cypher query to find or create the memory node with the given description and labels + memory_cypher = f""" + MERGE (memory:{topic} {{description: '{topic}', label: [{label_list}]}}) + SET memory.memoryId = ID(memory) + RETURN id(memory) AS memoryId + """ + + try: + result = self.query(memory_cypher) + # Assuming the result is a list of records, where each record contains 'memoryId' + memory_id = result[0]['memoryId'] if result else None + self.close() + return memory_id + except Neo4jError as e: + logging.error(f"Error creating or finding memory node: {e}") + raise + + def link_user_to_public(self, user_id: str, public_property_value: str, public_property_name: str = 'name', + relationship_type: str = 'HAS_PUBLIC'): + if not user_id or not public_property_value: + raise ValueError("Valid User ID and Public property value are required for linking.") + + try: + link_cypher = f""" + MATCH (user:User {{userId: '{user_id}'}}) + MATCH (public:Public {{{public_property_name}: '{public_property_value}'}}) + MERGE (user)-[:{relationship_type}]->(public) + """ + self.query(link_cypher) + except Neo4jError as e: + logging.error(f"Error linking Public node to user: {e}") + raise + + def delete_memory_node(self, memory_id: int, topic: str) -> None: + if not memory_id or not topic: + raise ValueError("Memory ID and Topic are required for deletion.") + + try: + delete_cypher = f""" + MATCH ({topic.lower()}: {topic}) WHERE id({topic.lower()}) = {memory_id} + DETACH DELETE {topic.lower()} + """ + logging.info("Delete Cypher Query: %s", delete_cypher) + self.query(delete_cypher) + except Neo4jError as e: + logging.error(f"Error deleting {topic} memory node: {e}") + raise + + def unlink_memory_from_user(self, memory_id: int, user_id: str, topic: str='PublicMemory') -> None: + """ + Unlink a memory node from a user node. 
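+        Only the relationship between the user and the memory node is deleted; the memory node itself is left in place.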
+
+        Parameters:
+        - memory_id (int): The internal ID of the memory node.
+        - user_id (str): The unique identifier for the user.
+        - topic (str): The type of memory node to unlink ("SemanticMemory" or "PublicMemory").
+
+        Raises:
+        - ValueError: If any required data is missing or invalid.
+        """
+
+        if not user_id or not isinstance(memory_id, int):
+            raise ValueError("Valid User ID and Memory ID are required for unlinking.")
+
+        if topic not in ["SemanticMemory", "PublicMemory"]:
+            raise ValueError("The memory_type must be either 'SemanticMemory' or 'PublicMemory'.")
+
+        relationship_type = "HAS_SEMANTIC_MEMORY" if topic == "SemanticMemory" else "HAS_PUBLIC_MEMORY"
+
+        try:
+            unlink_cypher = f"""
+            MATCH (user:User {{userId: '{user_id}'}})-[r:{relationship_type}]->(memory:{topic}) WHERE id(memory) = {memory_id}
+            DELETE r
+            """
+            self.query(unlink_cypher)
+        except Neo4jError as e:
+            logging.error(f"Error unlinking {topic} from user: {e}")
+            raise
+
+    def link_public_memory_to_user(self, memory_id, user_id):
+        # Link an existing Public Memory node to a User node
+        link_cypher = f"""
+        MATCH (user:User {{userId: '{user_id}'}})
+        MATCH (publicMemory:PublicMemory) WHERE id(publicMemory) = {memory_id}
+        MERGE (user)-[:HAS_PUBLIC_MEMORY]->(publicMemory)
+        """
+        self.query(link_cypher)
+
+    def retrieve_node_id_for_memory_type(self, topic: str = 'SemanticMemory'):
+        link_cypher = f"""
+        MATCH (memory:{topic})
+        RETURN id(memory) AS memoryId
+        """
+        node_ids = self.query(link_cypher)
+        return node_ids
+
+
+from .networkx_graph import NetworkXGraphDB
+
+
+class GraphDBFactory:
+    def create_graph_db(self, db_type, **kwargs):
+        if db_type == 'neo4j':
+            return Neo4jGraphDB(**kwargs)
+        elif db_type == 'networkx':
+            return NetworkXGraphDB(**kwargs)
+        else:
+            raise ValueError(f"Unsupported database type: {db_type}")
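+
+# Illustrative factory usage; the connection kwargs are assumptions and depend on
+# how Neo4jGraphDB is configured elsewhere in the project:
+#     graph_db = GraphDBFactory().create_graph_db('neo4j', url='bolt://localhost:7687', username='neo4j', password='...')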
 diff --git a/cognitive_architecture/database/graph_database/networkx_graph.py b/cognitive_architecture/database/graph_database/networkx_graph.py new file mode 100644 index 000000000..adcabf0a3 --- /dev/null +++ b/cognitive_architecture/database/graph_database/networkx_graph.py @@ -0,0 +1,90 @@
+import pickle
+
+import networkx as nx
+
+
+class NetworkXGraphDB:
+    def __init__(self, filename='networkx_graph.pkl'):
+        self.filename = filename
+        try:
+            self.graph = self.load_graph()  # Attempt to load an existing graph
+        except (FileNotFoundError, EOFError, pickle.UnpicklingError):
+            self.graph = nx.Graph()  # Create a new graph if loading failed
+
+    def save_graph(self):
+        """ Save the graph to a file using pickle """
+        with open(self.filename, 'wb') as f:
+            pickle.dump(self.graph, f)
+
+    def load_graph(self):
+        """ Load the graph from a file using pickle """
+        with open(self.filename, 'rb') as f:
+            return pickle.load(f)
+
+    def create_base_cognitive_architecture(self, user_id: str):
+        # Add nodes for user and memory types if they don't exist
+        self.graph.add_node(user_id, type='User')
+        self.graph.add_node(f"{user_id}_semantic", type='SemanticMemory')
+        self.graph.add_node(f"{user_id}_episodic", type='EpisodicMemory')
+        self.graph.add_node(f"{user_id}_buffer", type='Buffer')
+
+        # Add edges to connect user to memory types
+        self.graph.add_edge(user_id, f"{user_id}_semantic", relation='HAS_SEMANTIC_MEMORY')
+        self.graph.add_edge(user_id, f"{user_id}_episodic", relation='HAS_EPISODIC_MEMORY')
+        self.graph.add_edge(user_id, f"{user_id}_buffer", relation='HAS_BUFFER')
+
+        self.save_graph()  # Save the graph after modifying it
+
+    def delete_all_user_memories(self, user_id: str):
+        # Remove nodes and edges related to the user's memories
+        for memory_type in ['semantic', 'episodic', 'buffer']:
+            memory_node = f"{user_id}_{memory_type}"
+            if memory_node in self.graph:
+                self.graph.remove_node(memory_node)
+
+        self.save_graph()  # Save the graph after modifying it
+
+    def delete_specific_memory_type(self, user_id: str, memory_type: str):
+        # Remove a specific type of memory node and its related edges
+        memory_node = f"{user_id}_{memory_type.lower()}"
+        if memory_node in self.graph:
+            self.graph.remove_node(memory_node)
+
+        self.save_graph()  # Save the graph after modifying it
+
+    def retrieve_semantic_memory(self, user_id: str):
+        return [n for n in self.graph.neighbors(f"{user_id}_semantic")]
+
+    def retrieve_episodic_memory(self, user_id: str):
+        return [n for n in self.graph.neighbors(f"{user_id}_episodic")]
+
+    def retrieve_buffer_memory(self, user_id: str):
+        return [n for n in self.graph.neighbors(f"{user_id}_buffer")]
+
+    def generate_graph_semantic_memory_document_summary(self, document_summary, unique_graphdb_mapping_values, document_namespace, user_id):
+        for node, attributes in unique_graphdb_mapping_values.items():
+            self.graph.add_node(node, **attributes)
+            self.graph.add_edge(f"{user_id}_semantic", node, relation='HAS_KNOWLEDGE')
+        self.save_graph()
+
+    def generate_document_summary(self, document_summary, unique_graphdb_mapping_values, document_namespace, user_id):
+        self.generate_graph_semantic_memory_document_summary(document_summary, unique_graphdb_mapping_values, document_namespace, user_id)
+
+    async def get_document_categories(self, user_id):
+        return [self.graph.nodes[n]['category'] for n in self.graph.neighbors(f"{user_id}_semantic") if 'category' in self.graph.nodes[n]]
+
+    async def get_document_ids(self, user_id, category):
+        return [n for n in self.graph.neighbors(f"{user_id}_semantic") if self.graph.nodes[n].get('category') == category]
+
+    def create_document_node(self, document_summary, user_id):
+        d_id = document_summary['d_id']
+        self.graph.add_node(d_id, **document_summary)
+        self.graph.add_edge(f"{user_id}_semantic", d_id, relation='HAS_DOCUMENT')
+        self.save_graph()
+
+    def update_document_node_with_namespace(self, user_id, vectordb_namespace, document_id):
+        if self.graph.has_node(document_id):
+            self.graph.nodes[document_id]['vectordbNamespace'] = vectordb_namespace
+            self.save_graph()
+
+    def get_namespaces_by_document_category(self, user_id, category):
+        return [self.graph.nodes[n].get('vectordbNamespace') for n in self.graph.neighbors(f"{user_id}_semantic") if self.graph.nodes[n].get('category') == category]
 diff --git a/level_4/cognitive_architecture/database/postgres/__init__.py b/cognitive_architecture/database/postgres/__init__.py similarity index 100% rename from level_4/cognitive_architecture/database/postgres/__init__.py rename to cognitive_architecture/database/postgres/__init__.py
 diff --git a/level_4/cognitive_architecture/database/postgres/database.py b/cognitive_architecture/database/postgres/database.py similarity index 67% rename from level_4/cognitive_architecture/database/postgres/database.py rename to cognitive_architecture/database/postgres/database.py index c5ff46dbe..eb344028e 100644 --- a/level_4/cognitive_architecture/database/postgres/database.py +++ b/cognitive_architecture/database/postgres/database.py @@ -15,26 +15,40 @@
 # # Get the parent directory of your script and add it to sys.path
 # parent_dir = os.path.dirname(script_dir)
 # sys.path.append(parent_dir)
+# from ...config import Config
+# config = Config()
+# config.load()
 # in seconds
 MAX_RETRIES = 3
 RETRY_DELAY = 5
-username = os.getenv('POSTGRES_USER')
-password = os.getenv('POSTGRES_PASSWORD')
-database_name = os.getenv('POSTGRES_DB')
 import os
-environment = os.environ.get("ENVIRONMENT")
-if environment == "local":
-    host = os.getenv('POSTGRES_HOST')
-elif environment == "docker":
-    host = os.getenv('POSTGRES_HOST_DOCKER')
-else:
-    host = os.getenv('POSTGRES_HOST_DOCKER')
+# Connection settings are read from the environment regardless of AWS_ENV
+host = os.environ.get('POSTGRES_HOST')
+username = os.environ.get('POSTGRES_USER')
+password = os.environ.get('POSTGRES_PASSWORD')
+database_name = os.environ.get('POSTGRES_DB')
+
+# host = config.postgres_host
+# username = config.postgres_user
+# password = config.postgres_password
+# database_name = config.postgres_db
+
 diff --git a/level_4/cognitive_architecture/database/postgres/database_crud.py b/cognitive_architecture/database/postgres/database_crud.py similarity index 81% rename from level_4/cognitive_architecture/database/postgres/database_crud.py rename to cognitive_architecture/database/postgres/database_crud.py index 3f713acb0..5807de460 100644 --- a/level_4/cognitive_architecture/database/postgres/database_crud.py +++ b/cognitive_architecture/database/postgres/database_crud.py @@ -34,16 +34,24 @@
 async def add_entity(session, entity):
     return "Successfully added entity"
-
+def update_entity_graph_summary(session, model, entity_id, new_value):
+    with session_scope(session) as s:
+        # Retrieve the entity from the database
+        entity = s.query(model).filter_by(id=entity_id).first()
+        if entity:
+            entity.graph_summary = new_value
+            s.commit()
+            return "Successfully updated entity"
+        else:
+            return "Entity not found"
 async def update_entity(session, model, entity_id, new_value):
     async with session_scope(session) as s:
         # Retrieve the entity from the database
         entity = await s.get(model, entity_id)
         if entity:
-            # Update the relevant column and 'updated_at' will be automatically updated
             entity.operation_status = new_value
-            return "Successfully updated entity"
+            await s.commit()
+            return "Successfully updated entity"
         else:
             return "Entity not found"
 diff --git a/level_4/cognitive_architecture/database/postgres/models/__init__.py b/cognitive_architecture/database/postgres/models/__init__.py similarity index 100% rename from level_4/cognitive_architecture/database/postgres/models/__init__.py rename to cognitive_architecture/database/postgres/models/__init__.py
 diff --git a/level_4/cognitive_architecture/database/postgres/models/docs.py b/cognitive_architecture/database/postgres/models/docs.py similarity index 92% rename from level_4/cognitive_architecture/database/postgres/models/docs.py rename to cognitive_architecture/database/postgres/models/docs.py index 490c687b7..dd694fae1 100644 --- a/level_4/cognitive_architecture/database/postgres/models/docs.py +++ b/cognitive_architecture/database/postgres/models/docs.py @@ -12,6 +12,7 @@
 class DocsModel(Base):
     operation_id = Column(String, ForeignKey('operations.id'), index=True)
     doc_name = Column(String, nullable=True)
     graph_summary = Column(Boolean, nullable=True)
+    memory_category = Column(String,
nullable=True) created_at = Column(DateTime, default=datetime.utcnow) updated_at = Column(DateTime, onupdate=datetime.utcnow) diff --git a/level_4/cognitive_architecture/database/postgres/models/memory.py b/cognitive_architecture/database/postgres/models/memory.py similarity index 95% rename from level_4/cognitive_architecture/database/postgres/models/memory.py rename to cognitive_architecture/database/postgres/models/memory.py index ee856f677..a92592c0e 100644 --- a/level_4/cognitive_architecture/database/postgres/models/memory.py +++ b/cognitive_architecture/database/postgres/models/memory.py @@ -12,6 +12,7 @@ class MemoryModel(Base): user_id = Column(String, ForeignKey('users.id'), index=True) operation_id = Column(String, ForeignKey('operations.id'), index=True) memory_name = Column(String, nullable=True) + memory_category = Column(String, nullable=True) created_at = Column(DateTime, default=datetime.utcnow) updated_at = Column(DateTime, onupdate=datetime.utcnow) methods_list = Column(String , nullable=True) diff --git a/level_4/cognitive_architecture/database/postgres/models/metadatas.py b/cognitive_architecture/database/postgres/models/metadatas.py similarity index 100% rename from level_4/cognitive_architecture/database/postgres/models/metadatas.py rename to cognitive_architecture/database/postgres/models/metadatas.py diff --git a/level_4/cognitive_architecture/database/postgres/models/operation.py b/cognitive_architecture/database/postgres/models/operation.py similarity index 100% rename from level_4/cognitive_architecture/database/postgres/models/operation.py rename to cognitive_architecture/database/postgres/models/operation.py diff --git a/level_4/cognitive_architecture/database/postgres/models/sessions.py b/cognitive_architecture/database/postgres/models/sessions.py similarity index 100% rename from level_4/cognitive_architecture/database/postgres/models/sessions.py rename to cognitive_architecture/database/postgres/models/sessions.py diff --git a/level_4/cognitive_architecture/database/postgres/models/user.py b/cognitive_architecture/database/postgres/models/user.py similarity index 100% rename from level_4/cognitive_architecture/database/postgres/models/user.py rename to cognitive_architecture/database/postgres/models/user.py diff --git a/level_4/cognitive_architecture/database/vectordb/__init__.py b/cognitive_architecture/database/vectordb/__init__.py similarity index 100% rename from level_4/cognitive_architecture/database/vectordb/__init__.py rename to cognitive_architecture/database/vectordb/__init__.py diff --git a/level_4/cognitive_architecture/database/vectordb/basevectordb.py b/cognitive_architecture/database/vectordb/basevectordb.py similarity index 87% rename from level_4/cognitive_architecture/database/vectordb/basevectordb.py rename to cognitive_architecture/database/vectordb/basevectordb.py index 8d6d73ddb..f6a507247 100644 --- a/level_4/cognitive_architecture/database/vectordb/basevectordb.py +++ b/cognitive_architecture/database/vectordb/basevectordb.py @@ -231,38 +231,6 @@ async def add_memories( embeddings: Optional[str] = None, ): - # from ast import literal_eval - # class DynamicSchema(Schema): - # pass - # - # default_version = 'current_timestamp' - # version_in_params = params.get("version", default_version) - # - # # Check and update metadata version in DB. 
- # schema_fields = params - # - # def create_field(field_type, **kwargs): - # field_mapping = { - # "Str": fields.Str, - # "Int": fields.Int, - # "Float": fields.Float, - # "Bool": fields.Bool, - # } - # return field_mapping[field_type](**kwargs) - # - # # Dynamic Schema Creation - # params['user_id'] = self.user_id - # - # - # schema_instance = self.create_dynamic_schema(params) # Always creating Str field, adjust as needed - # - # logging.info(f"params : {params}") - # - # # Schema Validation - # schema_instance = schema_instance - # print("Schema fields: ", [field for field in schema_instance._declared_fields]) - # loaded_params = schema_instance.load(params) - return await self.vector_db.add_memories( observation=observation, loader_settings=loader_settings, params=params, namespace=namespace, metadata_schema_class = None, embeddings=embeddings @@ -278,7 +246,6 @@ async def fetch_memories( n_of_observations: Optional[int] = 2, ): logging.info(namespace) - logging.info("The search type is %", search_type) logging.info(params) logging.info(observation) diff --git a/level_4/cognitive_architecture/database/vectordb/chunkers/__init__.py b/cognitive_architecture/database/vectordb/chunkers/__init__.py similarity index 100% rename from level_4/cognitive_architecture/database/vectordb/chunkers/__init__.py rename to cognitive_architecture/database/vectordb/chunkers/__init__.py diff --git a/level_4/cognitive_architecture/shared/chunk_strategy.py b/cognitive_architecture/database/vectordb/chunkers/chunk_strategy.py similarity index 100% rename from level_4/cognitive_architecture/shared/chunk_strategy.py rename to cognitive_architecture/database/vectordb/chunkers/chunk_strategy.py diff --git a/level_4/cognitive_architecture/database/vectordb/chunkers/chunkers.py b/cognitive_architecture/database/vectordb/chunkers/chunkers.py similarity index 92% rename from level_4/cognitive_architecture/database/vectordb/chunkers/chunkers.py rename to cognitive_architecture/database/vectordb/chunkers/chunkers.py index cfa2c0a2a..16c9b09b3 100644 --- a/level_4/cognitive_architecture/database/vectordb/chunkers/chunkers.py +++ b/cognitive_architecture/database/vectordb/chunkers/chunkers.py @@ -1,7 +1,4 @@ -from langchain.document_loaders import PyPDFLoader -import sys, os - -from cognitive_architecture.shared.chunk_strategy import ChunkStrategy +from cognitive_architecture.database.vectordb.chunkers.chunk_strategy import ChunkStrategy import re def chunk_data(chunk_strategy=None, source_data=None, chunk_size=None, chunk_overlap=None): @@ -32,10 +29,13 @@ def vanilla_chunker(source_data, chunk_size=100, chunk_overlap=20): chunk_overlap=chunk_overlap, length_function=len ) - try: - pages = text_splitter.create_documents([source_data]) - except: - pages = text_splitter.create_documents(source_data.content) + # try: + # pages = text_splitter.create_documents([source_data]) + # except: + # try: + pages = text_splitter.create_documents([source_data]) + # except: + # pages = text_splitter.create_documents(source_data.content) # pages = source_data.load_and_split() return pages diff --git a/cognitive_architecture/database/vectordb/cognee_manager.py b/cognitive_architecture/database/vectordb/cognee_manager.py new file mode 100644 index 000000000..1f3127fc5 --- /dev/null +++ b/cognitive_architecture/database/vectordb/cognee_manager.py @@ -0,0 +1,151 @@ +import os +import requests +import json +from .embeddings import Embeddings +from .vector_db import VectorDB +from .response import Response + + +class CogneeManager: + def 
__init__(self, embeddings: Embeddings = None, + vector_db: VectorDB = None, + vector_db_key: str = None, + embedding_api_key: str = None, + webhook_url: str = None, + lines_per_batch: int = 1000, + webhook_key: str = None, + document_id: str = None, + chunk_validation_url: str = None, + internal_api_key: str = "test123", + base_url="http://localhost:8000"): + self.embeddings = embeddings if embeddings else Embeddings() + self.vector_db = vector_db if vector_db else VectorDB() + self.webhook_url = webhook_url + self.lines_per_batch = lines_per_batch + self.webhook_key = webhook_key + self.document_id = document_id + self.chunk_validation_url = chunk_validation_url + self.vector_db_key = vector_db_key + self.embeddings_api_key = embedding_api_key + self.internal_api_key = internal_api_key + self.base_url = base_url + + def serialize(self): + data = { + 'EmbeddingsMetadata': json.dumps(self.embeddings.serialize()), + 'VectorDBMetadata': json.dumps(self.vector_db.serialize()), + 'WebhookURL': self.webhook_url, + 'LinesPerBatch': self.lines_per_batch, + 'DocumentID': self.document_id, + 'ChunkValidationURL': self.chunk_validation_url, + } + return {k: v for k, v in data.items() if v is not None} + + def upload(self, file_paths: list[str], base_url=None): + if base_url: + url = base_url + "/jobs" + else: + url = self.base_url + "/jobs" + + data = self.serialize() + headers = self.generate_headers() + multipart_form_data = [('file', (os.path.basename(filepath), open(filepath, 'rb'), 'application/octet-stream')) + for filepath in file_paths] + + print(f"embedding {len(file_paths)} documents at {url}") + response = requests.post(url, files=multipart_form_data, headers=headers, stream=True, data=data) + + if response.status_code == 500: + print(response.text) + return Response(error=response.text, status_code=response.status_code) + + response_json = response.json() + if response.status_code >= 400 and response.status_code < 500: + print(f"Error: {response_json['error']}") + + return Response.from_json(response_json, response.status_code) + + def get_job_statuses(self, job_ids: list[int], base_url=None): + if base_url: + url = base_url + "/jobs/status" + else: + url = self.base_url + "/jobs/status" + + headers = { + "Authorization": self.internal_api_key, + } + + data = { + 'JobIDs': job_ids + } + + print(f"retrieving job statuses for {len(job_ids)} jobs at {url}") + response = requests.post(url, headers=headers, json=data) + + if response.status_code == 500: + print(response.text) + return Response(error=response.text, status_code=response.status_code) + + response_json = response.json() + if response.status_code >= 400 and response.status_code < 500: + print(f"Error: {response_json['error']}") + + return Response.from_json(response_json, response.status_code) + + def embed(self, filepath, base_url=None): + if base_url: + url = base_url + "/embed" + else: + url = self.base_url + "/embed" + + data = self.serialize() + headers = self.generate_headers() + + files = { + 'SourceData': open(filepath, 'rb') + } + + print(f"embedding document at file path {filepath} at {url}") + response = requests.post(url, headers=headers, data=data, files=files) + + if response.status_code == 500: + print(response.text) + return Response(error=response.text, status_code=response.status_code) + + response_json = response.json() + if response.status_code >= 400 and response.status_code < 500: + print(f"Error: {response_json['error']}") + + return Response.from_json(response_json, response.status_code) + + def 
get_job_status(self, job_id, base_url=None): + if base_url: + url = base_url + "/jobs/" + str(job_id) + "/status" + else: + url = self.base_url + "/jobs/" + str(job_id) + "/status" + + headers = { + "Authorization": self.internal_api_key, + } + + print(f"retrieving job status for job {job_id} at {url}") + response = requests.get(url, headers=headers) + + if response.status_code == 500: + print(response.text) + return Response(error=response.text, status_code=response.status_code) + + response_json = response.json() + if response.status_code >= 400 and response.status_code < 500: + print(f"Error: {response_json['error']}") + + return Response.from_json(response_json, response.status_code) + + def generate_headers(self): + headers = { + "Authorization": self.internal_api_key, + "X-EmbeddingAPI-Key": self.embeddings_api_key, + "X-VectorDB-Key": self.vector_db_key, + "X-Webhook-Key": self.webhook_key + } + return {k: v for k, v in headers.items() if v is not None} \ No newline at end of file diff --git a/cognitive_architecture/database/vectordb/embeddings/embeddings.py b/cognitive_architecture/database/vectordb/embeddings/embeddings.py new file mode 100644 index 000000000..01118d279 --- /dev/null +++ b/cognitive_architecture/database/vectordb/embeddings/embeddings.py @@ -0,0 +1,29 @@ +from .embeddings_type import EmbeddingsType +from ..chunkers.chunk_strategy import ChunkStrategy + + +class Embeddings: + def __init__(self, embeddings_type: EmbeddingsType = EmbeddingsType.OPEN_AI, + chunk_size: int = 256, + chunk_overlap: int = 128, + chunk_strategy: ChunkStrategy = ChunkStrategy.EXACT, + docker_image: str = None, + hugging_face_model_name: str = None): + self.embeddings_type = embeddings_type + self.chunk_size = chunk_size + self.chunk_overlap = chunk_overlap + self.chunk_strategy = chunk_strategy + self.docker_image = docker_image + self.hugging_face_model_name = hugging_face_model_name + + def serialize(self): + data = { + 'embeddings_type': self.embeddings_type.name if self.embeddings_type else None, + 'chunk_size': self.chunk_size, + 'chunk_overlap': self.chunk_overlap, + 'chunk_strategy': self.chunk_strategy.name if self.chunk_strategy else None, + 'docker_image': self.docker_image, + 'hugging_face_model_name': self.hugging_face_model_name + } + + return {k: v for k, v in data.items() if v is not None} diff --git a/cognitive_architecture/database/vectordb/embeddings/embeddings_type.py b/cognitive_architecture/database/vectordb/embeddings/embeddings_type.py new file mode 100644 index 000000000..e2aa498a4 --- /dev/null +++ b/cognitive_architecture/database/vectordb/embeddings/embeddings_type.py @@ -0,0 +1,8 @@ +from enum import Enum + +class EmbeddingsType(Enum): + OPEN_AI = 'open_ai' + COHERE = 'cohere' + SELF_HOSTED = 'self_hosted' + HUGGING_FACE = 'hugging_face' + IMAGE = 'image' \ No newline at end of file diff --git a/cognitive_architecture/database/vectordb/job.py b/cognitive_architecture/database/vectordb/job.py new file mode 100644 index 000000000..67a82c1fd --- /dev/null +++ b/cognitive_architecture/database/vectordb/job.py @@ -0,0 +1,18 @@ +class Job: + def __init__(self, job_id, job_status=None, filename=None): + self.job_id = job_id + self.job_status = job_status + self.filename = filename + + def __str__(self): + attributes = [] + if self.job_id is not None: + attributes.append(f"job_id: {self.job_id}") + if self.job_status is not None: + attributes.append(f"job_status: {self.job_status}") + if self.filename is not None: + attributes.append(f"filename: {self.filename}") + 
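+        # Render only the attributes that are actually set.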
return "Job(" + ", ".join(attributes) + ")" + + def __repr__(self): + return self.__str__() \ No newline at end of file diff --git a/level_4/cognitive_architecture/database/vectordb/loaders/__init__.py b/cognitive_architecture/database/vectordb/loaders/__init__.py similarity index 100% rename from level_4/cognitive_architecture/database/vectordb/loaders/__init__.py rename to cognitive_architecture/database/vectordb/loaders/__init__.py diff --git a/cognitive_architecture/database/vectordb/loaders/loaders.py b/cognitive_architecture/database/vectordb/loaders/loaders.py new file mode 100644 index 000000000..760eeb17e --- /dev/null +++ b/cognitive_architecture/database/vectordb/loaders/loaders.py @@ -0,0 +1,206 @@ +from io import BytesIO +import fitz +import os +import sys + +from cognitive_architecture.database.vectordb.chunkers.chunkers import chunk_data +from cognitive_architecture.shared.language_processing import translate_text, detect_language + +from langchain.document_loaders import UnstructuredURLLoader +from langchain.document_loaders import DirectoryLoader +import logging +import os +from langchain.document_loaders import TextLoader +import requests + + +async def fetch_pdf_content(file_url): + response = requests.get(file_url) + pdf_stream = BytesIO(response.content) + with fitz.open(stream=pdf_stream, filetype='pdf') as doc: + return "".join(page.get_text() for page in doc) + +async def fetch_text_content(file_url): + loader = UnstructuredURLLoader(urls=file_url) + return loader.load() + +async def process_content(content, metadata, loader_strategy, chunk_size, chunk_overlap): + pages = chunk_data(chunk_strategy=loader_strategy, source_data=content, chunk_size=chunk_size, + chunk_overlap=chunk_overlap) + + if metadata is None: + metadata = {"metadata": "None"} + + chunk_count= 0 + + for chunk in pages: + chunk_count+=1 + chunk.metadata = metadata + chunk.metadata["chunk_count"]=chunk_count + if detect_language(pages) != "en": + logging.info("Translating Page") + for page in pages: + if detect_language(page.page_content) != "en": + page.page_content = translate_text(page.page_content) + + return pages + +async def _document_loader(observation: str, loader_settings: dict): + document_format = loader_settings.get("format", "text") + loader_strategy = loader_settings.get("strategy", "VANILLA") + chunk_size = loader_settings.get("chunk_size", 500) + chunk_overlap = loader_settings.get("chunk_overlap", 20) + + logging.info("LOADER SETTINGS %s", loader_settings) + + list_of_docs = loader_settings["path"] + chunked_doc = [] + + if loader_settings.get("source") == "URL": + for file in list_of_docs: + if document_format == "PDF": + content = await fetch_pdf_content(file) + elif document_format == "TEXT": + content = await fetch_text_content(file) + else: + raise ValueError(f"Unsupported document format: {document_format}") + + pages = await process_content(content, metadata=None, loader_strategy=loader_strategy, chunk_size= chunk_size, chunk_overlap= chunk_overlap) + chunked_doc.append(pages) + + elif loader_settings.get("source") == "DEVICE": + if loader_settings.get("bulk_load", False) == True: + current_directory = os.getcwd() + logging.info("Current Directory: %s", current_directory) + loader = DirectoryLoader(".data", recursive=True) + documents = loader.load() + for document in documents: + # print ("Document: ", document.page_content) + pages = await process_content(content= str(document.page_content), metadata=document.metadata, loader_strategy= loader_strategy, chunk_size = 
 diff --git a/cognitive_architecture/database/vectordb/response.py b/cognitive_architecture/database/vectordb/response.py new file mode 100644 index 000000000..b0176b972 --- /dev/null +++ b/cognitive_architecture/database/vectordb/response.py @@ -0,0 +1,72 @@
+from .job import Job
+
+
+class Response:
+    def __init__(self, error=None, message=None, successful_uploads=None, failed_uploads=None,
+                 empty_files_count=None, duplicate_files_count=None, job_id=None,
+                 jobs=None, job_status=None, status_code=None):
+        self.error = error
+        self.message = message
+        self.successful_uploads = successful_uploads
+        self.failed_uploads = failed_uploads
+        self.empty_files_count = empty_files_count
+        self.duplicate_files_count = duplicate_files_count
+        self.job_id = job_id
+        self.jobs = jobs
+        self.job_status = job_status
+        self.status_code = status_code
+
+    @classmethod
+    def from_json(cls, json_dict, status_code):
+        successful_uploads = cls._convert_successful_uploads_to_jobs(json_dict.get('successful_uploads', None))
+        jobs = cls._convert_to_jobs(json_dict.get('Jobs', None))
+
+        return cls(
+            error=json_dict.get('error'),
+            message=json_dict.get('message'),
+            successful_uploads=successful_uploads,
+            failed_uploads=json_dict.get('failed_uploads'),
+            empty_files_count=json_dict.get('empty_files_count'),
+            duplicate_files_count=json_dict.get('duplicate_files_count'),
+            job_id=json_dict.get('JobID'),
+            jobs=jobs,
+            job_status=json_dict.get('JobStatus'),
+            status_code=status_code
+        )
+
+    @classmethod
+    def _convert_successful_uploads_to_jobs(cls, successful_uploads):
+        if not successful_uploads:
+            return None
+        return [Job(filename=key, job_id=val) for key, val in successful_uploads.items()]
+
+    @classmethod
+    def _convert_to_jobs(cls, jobs):
+        if not jobs:
+            return None
+        return [Job(job_id=job['JobID'], job_status=job['JobStatus']) for job in jobs]
+
+    def __str__(self):
+        attributes = []
+        if self.error is not None:
+            attributes.append(f"error: {self.error}")
+        if self.message is not None:
+            attributes.append(f"message: {self.message}")
+        if 
self.successful_uploads is not None: + attributes.append(f"successful_uploads: {str(self.successful_uploads)}") + if self.failed_uploads is not None: + attributes.append(f"failed_uploads: {self.failed_uploads}") + if self.empty_files_count is not None: + attributes.append(f"empty_files_count: {self.empty_files_count}") + if self.duplicate_files_count is not None: + attributes.append(f"duplicate_files_count: {self.duplicate_files_count}") + if self.job_id is not None: + attributes.append(f"job_id: {self.job_id}") + if self.jobs is not None: + attributes.append(f"jobs: {str(self.jobs)}") + if self.job_status is not None: + attributes.append(f"job_status: {self.job_status}") + if self.status_code is not None: + attributes.append(f"status_code: {self.status_code}") + + return "Response(" + ", ".join(attributes) + ")" \ No newline at end of file diff --git a/cognitive_architecture/database/vectordb/vector_db_type.py b/cognitive_architecture/database/vectordb/vector_db_type.py new file mode 100644 index 000000000..b7983e8bc --- /dev/null +++ b/cognitive_architecture/database/vectordb/vector_db_type.py @@ -0,0 +1,13 @@ +from enum import Enum + +class VectorDBType(Enum): + PINECONE = 'pinecone' + WEAVIATE = 'weaviate' + MILVUS = 'milvus' + QDRANT = 'qdrant' + DEEPLAKE = 'deeplake' + VESPA = 'vespa' + PGVECTOR = 'pgvector' + REDIS = 'redis' + LANCEDB = 'lancedb' + MONGODB = 'mongodb' \ No newline at end of file diff --git a/level_4/cognitive_architecture/database/vectordb/vectordb.py b/cognitive_architecture/database/vectordb/vectordb.py similarity index 79% rename from level_4/cognitive_architecture/database/vectordb/vectordb.py rename to cognitive_architecture/database/vectordb/vectordb.py index 4b988d223..f0581a356 100644 --- a/level_4/cognitive_architecture/database/vectordb/vectordb.py +++ b/cognitive_architecture/database/vectordb/vectordb.py @@ -60,7 +60,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.init_weaviate(embeddings= self.embeddings, namespace = self.namespace) - def init_weaviate(self, embeddings=OpenAIEmbeddings(), namespace=None,retriever_type="",): + def init_weaviate(self, embeddings=OpenAIEmbeddings(openai_api_key=os.getenv("OPENAI_API_KEY", "")), namespace=None,retriever_type="",): # Weaviate initialization logic auth_config = weaviate.auth.AuthApiKey( api_key=os.environ.get("WEAVIATE_API_KEY") @@ -130,8 +130,6 @@ class DynamicDocumentSchema(Schema): def _stuct(self, observation, params, metadata_schema_class =None): """Utility function to create the document structure with optional custom fields.""" - - # Construct document data document_data = { "metadata": params, @@ -151,37 +149,38 @@ async def add_memories(self, observation, loader_settings=None, params=None, nam # Update Weaviate memories here if namespace is None: namespace = self.namespace + params['user_id'] = self.user_id + logging.info("User id is %s", self.user_id) retriever = self.init_weaviate(embeddings=OpenAIEmbeddings(),namespace = namespace, retriever_type="single_document_context") if loader_settings: # Assuming _document_loader returns a list of documents documents = await _document_loader(observation, loader_settings) logging.info("here are the docs %s", str(documents)) chunk_count = 0 - for doc in documents[0]: - chunk_count += 1 - params['chunk_order'] = chunk_count - # document_to_load = self._stuct(doc.page_content, params, metadata_schema_class) - - # logging.info("Loading document with provided loader settings %s", str(document_to_load)) - retriever.add_documents([ 
- Document(metadata=params, page_content=doc.page_content)]) + for doc_list in documents: + for doc in doc_list: + chunk_count += 1 + params['chunk_count'] = doc.metadata.get("chunk_count", "None") + logging.info("Loading document with provided loader settings %s", str(doc)) + params['source'] = doc.metadata.get("source", "None") + logging.info("Params are %s", str(params)) + retriever.add_documents([ + Document(metadata=params, page_content=doc.page_content)]) else: chunk_count = 0 from cognitive_architecture.database.vectordb.chunkers.chunkers import chunk_data - documents = [chunk_data(chunk_strategy="VANILLA", source_data=observation, chunk_size=50, + documents = [chunk_data(chunk_strategy="VANILLA", source_data=observation, chunk_size=300, chunk_overlap=20)] for doc in documents[0]: chunk_count += 1 params['chunk_order'] = chunk_count - # document_to_load = self._stuct(observation, params, metadata_schema_class) - - logging.info("Loading document with defautl loader settings %s", str(doc)) - - # logging.info("Loading document with defautl loader settings %s", str(document_to_load)) + params['source'] = "User loaded" + logging.info("Loading document with default loader settings %s", str(doc)) + logging.info("Params are %s", str(params)) retriever.add_documents([ Document(metadata=params, page_content=doc.page_content)]) - async def fetch_memories(self, observation: str, namespace: str = None, search_type: str = 'hybrid', **kwargs): + async def fetch_memories(self, observation: str, namespace: str = None, search_type: str = 'hybrid',params=None, **kwargs): """ Fetch documents from weaviate. @@ -200,22 +199,9 @@ async def fetch_memories(self, observation: str, namespace: str = None, search_t client = self.init_weaviate(namespace =self.namespace) if search_type is None: search_type = 'hybrid' - logging.info("The search type is s%", search_type) - if search_type == 'summary': - from weaviate.classes import Filter - client = weaviate.connect_to_wcs( - cluster_url=config.weaviate_url, - auth_credentials=weaviate.AuthApiKey(config.weaviate_api_key) - ) - summary_collection = client.collections.get(self.namespace) - response = summary_collection.query.fetch_objects( - filters=Filter("user_id").equal(self.user_id) & - Filter("chunk_order").less_than(25), - limit=15 - ) - return response + if not namespace: namespace = self.namespace @@ -266,6 +252,65 @@ def list_objects_of_class(class_name, schema): .with_autocut(n_of_observations) .do() ) + elif search_type == 'summary': + filter_object = { + "operator": "And", + "operands": [ + { + "path": ["user_id"], + "operator": "Equal", + "valueText": self.user_id, + }, + { + "path": ["chunk_order"], + "operator": "LessThan", + "valueNumber": 30, + }, + ] + } + base_query = client.query.get( + namespace, list(list_objects_of_class(namespace, client.schema.get())) + ).with_additional( + ["id", "creationTimeUnix", "lastUpdateTimeUnix", "score", 'distance'] + ).with_where(filter_object).with_limit(30) + query_output = ( + base_query + # .with_hybrid(query=observation, fusion_type=HybridFusion.RELATIVE_SCORE) + .do() + ) + + elif search_type == 'summary_filter_by_object_name': + filter_object = { + "operator": "And", + "operands": [ + { + "path": ["user_id"], + "operator": "Equal", + "valueText": self.user_id, + }, + { + "path": ["doc_id"], + "operator": "Equal", + "valueText": params, + }, + ] + } + base_query = client.query.get( + namespace, list(list_objects_of_class(namespace, client.schema.get())) + ).with_additional( + ["id", "creationTimeUnix", 
"lastUpdateTimeUnix", "score", 'distance'] + ).with_where(filter_object).with_limit(30).with_hybrid(query=observation, fusion_type=HybridFusion.RELATIVE_SCORE) + query_output = ( + base_query + .do() + ) + # from weaviate.classes import Filter + # client = weaviate.connect_to_wcs( + # cluster_url=config.weaviate_url, + # auth_credentials=weaviate.AuthApiKey(config.weaviate_api_key) + # ) + + return query_output elif search_type == 'generate': generate_prompt = kwargs.get('generate_prompt', "") query_output = ( @@ -293,6 +338,8 @@ def list_objects_of_class(class_name, schema): return query_output + + async def delete_memories(self, namespace:str, params: dict = None): if namespace is None: namespace = self.namespace diff --git a/level_4/cognitive_architecture/fetch_secret.py b/cognitive_architecture/fetch_secret.py similarity index 90% rename from level_4/cognitive_architecture/fetch_secret.py rename to cognitive_architecture/fetch_secret.py index c9c070035..b8635b009 100644 --- a/level_4/cognitive_architecture/fetch_secret.py +++ b/cognitive_architecture/fetch_secret.py @@ -21,8 +21,11 @@ def fetch_secret(secret_name, region_name, env_file_path): + print("Initializing session") session = boto3.session.Session() + print("Session initialized") client = session.client(service_name="secretsmanager", region_name=region_name) + print("Client initialized") try: response = client.get_secret_value(SecretId=secret_name) @@ -49,18 +52,18 @@ def fetch_secret(secret_name, region_name, env_file_path): return "Success in loading env files" -env_file = ".env" +env_file = "../.env" if os.path.exists(env_file): # Load default environment variables (.env) load_dotenv() - print("Talk to the AI!") + print("cognee is running") else: secrets = fetch_secret( f"promethai-{environment}-backend-secretso-promethaijs-dotenv", "eu-west-1", - ".env", + "../.env", ) if secrets: print(secrets) @@ -73,7 +76,7 @@ def fetch_secret(secret_name, region_name, env_file_path): secrets = fetch_secret( f"promethai-dev-backend-secretso-promethaijs-dotenv", "eu-west-1", - ".env", + "../.env", ) load_dotenv() elif "prd" in environment: @@ -81,6 +84,6 @@ def fetch_secret(secret_name, region_name, env_file_path): secrets = fetch_secret( f"promethai-prd-backend-secretso-promethaijs-dotenv", "eu-west-1", - ".env", + "../.env", ) load_dotenv() diff --git a/cognitive_architecture/llm/prompts/generate_graph_prompt.txt b/cognitive_architecture/llm/prompts/generate_graph_prompt.txt new file mode 100644 index 000000000..744903cda --- /dev/null +++ b/cognitive_architecture/llm/prompts/generate_graph_prompt.txt @@ -0,0 +1,34 @@ +You are a top-tier algorithm +designed for extracting information in structured formats to build a knowledge graph. +- **Nodes** represent entities and concepts. They're akin to Wikipedia nodes. +- The aim is to achieve simplicity and clarity in the +knowledge graph, making it accessible for a vast audience. +## 2. Labeling Nodes +- **Consistency**: Ensure you use basic or elementary types for node labels. + - For example, when you identify an entity representing a person, + always label it as **"person"**. + Avoid using more specific terms like "mathematician" or "scientist". + - Include event, entity, time, or action nodes to the category. + - Classify the memory type as episodic or semantic. +- **Node IDs**: Never utilize integers as node IDs. + Node IDs should be names or human-readable identifiers found in the text. +## 3. 
Handling Numerical Data and Dates
+- Numerical data, like age or other related information,
+should be incorporated as attributes or properties of the respective nodes.
+- **No Separate Nodes for Dates/Numbers**:
+Do not create separate nodes for dates or numerical values.
+ Always attach them as attributes or properties of nodes.
+- **Property Format**: Properties must be in a key-value format.
+- **Quotation Marks**: Never use escaped single or double quotes within property values.
+- **Naming Convention**: Use camelCase for property keys, e.g., `birthDate`.
+## 4. Coreference Resolution
+- **Maintain Entity Consistency**:
+When extracting entities, it's vital to ensure consistency.
+If an entity, such as "John Doe", is mentioned multiple times
+in the text but is referred to by different names or pronouns (e.g., "Joe", "he"),
+always use the most complete identifier for that entity throughout the knowledge graph.
+ In this example, use "John Doe" as the entity ID.
+Remember, the knowledge graph should be coherent and easily understandable,
+ so maintaining consistency in entity references is crucial.
+## 5. Strict Compliance
+Adhere to the rules strictly. Non-compliance will result in termination.
\ No newline at end of file
diff --git a/cognitive_architecture/llm/queries.py b/cognitive_architecture/llm/queries.py
new file mode 100644
index 000000000..ac647d657
--- /dev/null
+++ b/cognitive_architecture/llm/queries.py
@@ -0,0 +1,84 @@
+import os
+
+from dotenv import load_dotenv
+
+from ..shared.data_models import Node, Edge, KnowledgeGraph, GraphQLQuery, MemorySummary
+from ..config import Config
+import instructor
+from openai import OpenAI
+
+# Load environment variables before the config and the OpenAI client are initialized
+load_dotenv()
+
+config = Config()
+config.load()
+
+print(config.model)
+
+OPENAI_API_KEY = config.openai_key
+
+aclient = instructor.patch(OpenAI(api_key=config.openai_key))
+
+
+# Read query prompts from files, resolved relative to this module
+# so imports work regardless of the current working directory
+def read_query_prompt(filename):
+    file_path = os.path.join(os.path.dirname(__file__), filename)
+    with open(file_path, 'r') as file:
+        return file.read()
+
+
+def generate_graph(input) -> KnowledgeGraph:
+    model = "gpt-4-1106-preview" # Define the model here
+    user_prompt = f"Use the given format to extract information from the following input: {input}."
+    system_prompt = read_query_prompt('prompts/generate_graph_prompt.txt')
+
+    out = aclient.chat.completions.create(
+        model=model,
+        messages=[
+            {
+                "role": "user",
+                "content": user_prompt,
+            },
+            {
+                "role": "system",
+                "content": system_prompt,
+            },
+        ],
+        response_model=KnowledgeGraph,
+    )
+    return out
+
+
+async def generate_summary(input) -> MemorySummary:
+    out = aclient.chat.completions.create(
+        model="gpt-4-1106-preview",
+        messages=[
+            {
+                "role": "user",
+                "content": f"""Use the given format to summarize and reduce the following input: {input}. """,
+            },
+            { "role": "system", "content": """You are a top-tier algorithm
+            designed for summarizing existing knowledge graphs in structured formats.
+            ## 1. Strict Compliance
+            Adhere to the rules strictly. Non-compliance will result in termination.
+            ## 2.
Don't forget your main goal is to reduce the number of nodes in the knowledge graph while preserving the information contained in it."""} + ], + response_model=MemorySummary, + ) + return out + + +def user_query_to_edges_and_nodes( input: str) ->KnowledgeGraph: + system_prompt = read_query_prompt('prompts/generate_graph_prompt.txt') + return aclient.chat.completions.create( + model=config.model, + messages=[ + { + "role": "user", + "content": f"""Use the given format to extract information from the following input: {input}. """, + + }, + {"role": "system", "content":system_prompt} + ], + response_model=KnowledgeGraph, + ) \ No newline at end of file diff --git a/level_4/cognitive_architecture/openai_tools.py b/cognitive_architecture/openai_tools.py similarity index 96% rename from level_4/cognitive_architecture/openai_tools.py rename to cognitive_architecture/openai_tools.py index 3f180edfc..68ce05a00 100644 --- a/level_4/cognitive_architecture/openai_tools.py +++ b/cognitive_architecture/openai_tools.py @@ -1,140 +1,140 @@ -import asyncio -import random -import os -import time - - - -HOST = os.getenv("OPENAI_API_BASE") -HOST_TYPE = os.getenv("BACKEND_TYPE") # default None == ChatCompletion - -import openai - -if HOST is not None: - openai.api_base = HOST - - -def retry_with_exponential_backoff( - func, - initial_delay: float = 1, - exponential_base: float = 2, - jitter: bool = True, - max_retries: int = 20, - errors: tuple = (openai.error.RateLimitError,), -): - """Retry a function with exponential backoff.""" - - def wrapper(*args, **kwargs): - # Initialize variables - num_retries = 0 - delay = initial_delay - - # Loop until a successful response or max_retries is hit or an exception is raised - while True: - try: - return func(*args, **kwargs) - - # Retry on specified errors - except errors as e: - # Increment retries - num_retries += 1 - - # Check if max retries has been reached - if num_retries > max_retries: - raise Exception(f"Maximum number of retries ({max_retries}) exceeded.") - - # Increment the delay - delay *= exponential_base * (1 + jitter * random.random()) - - # Sleep for the delay - time.sleep(delay) - - # Raise exceptions for any errors not specified - except Exception as e: - raise e - - return wrapper - - -@retry_with_exponential_backoff -def completions_with_backoff(**kwargs): - # Local model - return openai.ChatCompletion.create(**kwargs) - - -def aretry_with_exponential_backoff( - func, - initial_delay: float = 1, - exponential_base: float = 2, - jitter: bool = True, - max_retries: int = 20, - errors: tuple = (openai.error.RateLimitError,), -): - """Retry a function with exponential backoff.""" - - async def wrapper(*args, **kwargs): - # Initialize variables - num_retries = 0 - delay = initial_delay - - # Loop until a successful response or max_retries is hit or an exception is raised - while True: - try: - return await func(*args, **kwargs) - - # Retry on specified errors - except errors as e: - print(f"acreate (backoff): caught error: {e}") - # Increment retries - num_retries += 1 - - # Check if max retries has been reached - if num_retries > max_retries: - raise Exception(f"Maximum number of retries ({max_retries}) exceeded.") - - # Increment the delay - delay *= exponential_base * (1 + jitter * random.random()) - - # Sleep for the delay - await asyncio.sleep(delay) - - # Raise exceptions for any errors not specified - except Exception as e: - raise e - - return wrapper - - -@aretry_with_exponential_backoff -async def acompletions_with_backoff(**kwargs): - return 
await openai.ChatCompletion.acreate(**kwargs) - - -@aretry_with_exponential_backoff -async def acreate_embedding_with_backoff(**kwargs): - """Wrapper around Embedding.acreate w/ backoff""" - return await openai.Embedding.acreate(**kwargs) - - -async def async_get_embedding_with_backoff(text, model="text-embedding-ada-002"): - """To get text embeddings, import/call this function - It specifies defaults + handles rate-limiting + is async""" - text = text.replace("\n", " ") - response = await acreate_embedding_with_backoff(input=[text], model=model) - embedding = response["data"][0]["embedding"] - return embedding - - -@retry_with_exponential_backoff -def create_embedding_with_backoff(**kwargs): - return openai.Embedding.create(**kwargs) - - -def get_embedding_with_backoff(text, model="text-embedding-ada-002"): - text = text.replace("\n", " ") - response = create_embedding_with_backoff(input=[text], model=model) - embedding = response["data"][0]["embedding"] - return embedding - - - +import asyncio +import random +import os +import time + + + +HOST = os.getenv("OPENAI_API_BASE") +HOST_TYPE = os.getenv("BACKEND_TYPE") # default None == ChatCompletion + +import openai + +if HOST is not None: + openai.api_base = HOST + + +def retry_with_exponential_backoff( + func, + initial_delay: float = 1, + exponential_base: float = 2, + jitter: bool = True, + max_retries: int = 20, + errors: tuple = (openai.error.RateLimitError,), +): + """Retry a function with exponential backoff.""" + + def wrapper(*args, **kwargs): + # Initialize variables + num_retries = 0 + delay = initial_delay + + # Loop until a successful response or max_retries is hit or an exception is raised + while True: + try: + return func(*args, **kwargs) + + # Retry on specified errors + except errors as e: + # Increment retries + num_retries += 1 + + # Check if max retries has been reached + if num_retries > max_retries: + raise Exception(f"Maximum number of retries ({max_retries}) exceeded.") + + # Increment the delay + delay *= exponential_base * (1 + jitter * random.random()) + + # Sleep for the delay + time.sleep(delay) + + # Raise exceptions for any errors not specified + except Exception as e: + raise e + + return wrapper + + +@retry_with_exponential_backoff +def completions_with_backoff(**kwargs): + # Local model + return openai.ChatCompletion.create(**kwargs) + + +def aretry_with_exponential_backoff( + func, + initial_delay: float = 1, + exponential_base: float = 2, + jitter: bool = True, + max_retries: int = 20, + errors: tuple = (openai.error.RateLimitError,), +): + """Retry a function with exponential backoff.""" + + async def wrapper(*args, **kwargs): + # Initialize variables + num_retries = 0 + delay = initial_delay + + # Loop until a successful response or max_retries is hit or an exception is raised + while True: + try: + return await func(*args, **kwargs) + + # Retry on specified errors + except errors as e: + print(f"acreate (backoff): caught error: {e}") + # Increment retries + num_retries += 1 + + # Check if max retries has been reached + if num_retries > max_retries: + raise Exception(f"Maximum number of retries ({max_retries}) exceeded.") + + # Increment the delay + delay *= exponential_base * (1 + jitter * random.random()) + + # Sleep for the delay + await asyncio.sleep(delay) + + # Raise exceptions for any errors not specified + except Exception as e: + raise e + + return wrapper + + +@aretry_with_exponential_backoff +async def acompletions_with_backoff(**kwargs): + return await openai.ChatCompletion.acreate(**kwargs) 
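A minimal usage sketch for the retry helpers above, assuming the legacy openai<1.0 SDK this module targets and an OPENAI_API_KEY already set in the environment; the model name and prompt are placeholders, not part of the module:

def ask_model(prompt: str) -> str:
    # completions_with_backoff retries ChatCompletion.create on RateLimitError,
    # sleeping with jittered exponential delay between attempts.
    response = completions_with_backoff(
        model="gpt-3.5-turbo",  # placeholder model name
        messages=[{"role": "user", "content": prompt}],
    )
    return response["choices"][0]["message"]["content"]

async def ask_model_async(prompt: str) -> str:
    # The async variant awaits ChatCompletion.acreate and backs off with
    # asyncio.sleep, so other coroutines keep running between attempts.
    response = await acompletions_with_backoff(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt}],
    )
    return response["choices"][0]["message"]["content"]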
+
+
+@aretry_with_exponential_backoff
+async def acreate_embedding_with_backoff(**kwargs):
+    """Wrapper around Embedding.acreate w/ backoff"""
+    return await openai.Embedding.acreate(**kwargs)
+
+
+async def async_get_embedding_with_backoff(text, model="text-embedding-ada-002"):
+    """To get text embeddings, import/call this function
+    It specifies defaults + handles rate-limiting + is async"""
+    text = text.replace("\n", " ")
+    response = await acreate_embedding_with_backoff(input=[text], model=model)
+    embedding = response["data"][0]["embedding"]
+    return embedding
+
+
+@retry_with_exponential_backoff
+def create_embedding_with_backoff(**kwargs):
+    return openai.Embedding.create(**kwargs)
+
+
+def get_embedding_with_backoff(text, model="text-embedding-ada-002"):
+    text = text.replace("\n", " ")
+    response = create_embedding_with_backoff(input=[text], model=model)
+    embedding = response["data"][0]["embedding"]
+    return embedding
diff --git a/level_4/cognitive_architecture/presets.py b/cognitive_architecture/presets.py
similarity index 100%
rename from level_4/cognitive_architecture/presets.py
rename to cognitive_architecture/presets.py
diff --git a/level_4/cognitive_architecture/graph_database/__init__.py b/cognitive_architecture/shared/__init__.py
similarity index 100%
rename from level_4/cognitive_architecture/graph_database/__init__.py
rename to cognitive_architecture/shared/__init__.py
diff --git a/cognitive_architecture/shared/data_models.py b/cognitive_architecture/shared/data_models.py
new file mode 100644
index 000000000..e74355cf6
--- /dev/null
+++ b/cognitive_architecture/shared/data_models.py
@@ -0,0 +1,33 @@
+from typing import Optional, List
+
+from pydantic import BaseModel, Field
+
+class Node(BaseModel):
+    id: int
+    description: str
+    category: str
+    color: str = "blue"
+    memory_type: str
+    created_at: Optional[float] = None
+    summarized: Optional[bool] = None
+
+class Edge(BaseModel):
+    source: int
+    target: int
+    description: str
+    color: str = "blue"
+    created_at: Optional[float] = None
+    summarized: Optional[bool] = None
+
+class KnowledgeGraph(BaseModel):
+    # Field(..., default_factory=list) is invalid in pydantic: a field cannot be
+    # both required and carry a default factory, so use the factory alone.
+    nodes: List[Node] = Field(default_factory=list)
+    edges: List[Edge] = Field(default_factory=list)
+
+class GraphQLQuery(BaseModel):
+    query: str
+
+class MemorySummary(BaseModel):
+    nodes: List[Node] = Field(default_factory=list)
+    edges: List[Edge] = Field(default_factory=list)
+
+
diff --git a/cognitive_architecture/shared/language_processing.py b/cognitive_architecture/shared/language_processing.py
new file mode 100644
index 000000000..8fda6f2fd
--- /dev/null
+++ b/cognitive_architecture/shared/language_processing.py
@@ -0,0 +1,92 @@
+import logging
+
+import boto3
+from botocore.exceptions import BotoCoreError, ClientError
+from langdetect import detect, LangDetectException
+from dotenv import load_dotenv
+
+load_dotenv()
+
+# Basic configuration of the logging system
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+
+
+def detect_language(text):
+    """
+    Detect the language of the given text and return its ISO 639-1 language code.
+    If the detected language is Croatian ('hr'), it maps to Serbian ('sr').
+    The text is trimmed to the first 100 characters for efficient processing.
+
+    Parameters:
+    text (str): The text for language detection.
+
+    Returns:
+    str: The ISO 639-1 language code of the detected language, or None in case of an error.
+ """ + + # Trim the text to the first 100 characters + trimmed_text = text[:100] + + try: + # Detect the language using langdetect + detected_lang_iso639_1 = detect(trimmed_text) + logging.info(f"Detected ISO 639-1 code: {detected_lang_iso639_1}") + + # Special case: map 'hr' (Croatian) to 'sr' (Serbian ISO 639-2) + if detected_lang_iso639_1 == 'hr': + return 'sr' + return detected_lang_iso639_1 + + except LangDetectException as e: + logging.error(f"Language detection error: {e}") + except Exception as e: + logging.error(f"Unexpected error: {e}") + + return -1 + + + +def translate_text(text, source_language:str='sr', target_language:str='en', region_name='eu-west-1'): + """ + Translate text from source language to target language using AWS Translate. + + + Parameters: + text (str): The text to be translated. + source_language (str): The source language code (e.g., 'sr' for Serbian). ISO 639-2 Code https://www.loc.gov/standards/iso639-2/php/code_list.php + target_language (str): The target language code (e.g., 'en' for English). ISO 639-2 Code https://www.loc.gov/standards/iso639-2/php/code_list.php + region_name (str): AWS region name. + + Returns: + str: Translated text or an error message. + """ + if not text: + return "No text provided for translation." + + if not source_language or not target_language: + return "Both source and target language codes are required." + + try: + translate = boto3.client(service_name='translate', region_name=region_name, use_ssl=True) + result = translate.translate_text(Text=text, SourceLanguageCode=source_language, TargetLanguageCode=target_language) + return result.get('TranslatedText', 'No translation found.') + + except BotoCoreError as e: + logging.info(f"BotoCoreError occurred: {e}") + return "Error with AWS Translate service configuration or request." + + except ClientError as e: + logging.info(f"ClientError occurred: {e}") + return "Error with AWS client or network issue." 
+
+
+if __name__ == "__main__":
+    # Run the demo translation only when this module is executed directly,
+    # so that importing it does not trigger an AWS Translate call.
+    source_language = 'sr'
+    target_language = 'en'
+    text_to_translate = "Ja volim da pecam i idem na reku da šetam pored nje ponekad"
+
+    translated_text = translate_text(text_to_translate, source_language, target_language)
+    print(translated_text)
+
+    # print(detect_language("Koliko krava ide u setnju?"))
diff --git a/level_4/cognitive_architecture/utils.py b/cognitive_architecture/utils.py
similarity index 82%
rename from level_4/cognitive_architecture/utils.py
rename to cognitive_architecture/utils.py
index 5617ef291..8b33245d5 100644
--- a/level_4/cognitive_architecture/utils.py
+++ b/cognitive_architecture/utils.py
@@ -7,6 +7,11 @@ from sqlalchemy import or_
 from sqlalchemy.orm import contains_eager
+from cognitive_architecture.database.postgres.database import AsyncSessionLocal
+from dotenv import load_dotenv
+
+load_dotenv()
+
 # from graph_database.graph import KnowledgeGraph
@@ -217,10 +222,11 @@ async def get_unsumarized_vector_db_namespace(session: AsyncSession, user_id: st
     operations = result.unique().scalars().all()
     # Extract memory names and document names and IDs
-    memory_names = [memory.memory_name for op in operations for memory in op.memories]
+    # memory_names = [memory.memory_name for op in operations for memory in op.memories]
+    memory_details = [(memory.memory_name, memory.memory_category) for op in operations for memory in op.memories]
     docs = [(doc.doc_name, doc.id) for op in operations for doc in op.docs]
-    return memory_names, docs
+    return memory_details, docs
 # except Exception as e:
 #     # Handle the exception as needed
@@ -264,4 +270,42 @@ async def get_memory_name_by_doc_id(session: AsyncSession, docs_id: str):
-
+#
+# async def main():
+#     user_id = "user"
+#
+#     async with session_scope(AsyncSessionLocal()) as session:
+#         output = await get_unsumarized_vector_db_namespace(session, user_id)
+#
+#         print(output)
+#         # await update_entity(session, DocsModel, "8cd9a022-5a7a-4af5-815a-f988415536ae", True)
+#         # out = await get_vectordb_namespace(session, user_id)
+#         # params = {
+#         #     "version": "1.0",
+#         #     "agreement_id": "AG123456",
+#         #     "privacy_policy": "https://example.com/privacy",
+#         #     "terms_of_service": "https://example.com/terms",
+#         #     "format": "json",
+#         #     "schema_version": "1.1",
+#         #     "checksum": "a1b2c3d4e5f6",
+#         #     "owner": "John Doe",
+#         #     "license": "MIT",
+#         #     "validity_start": "2023-08-01",
+#         #     "validity_end": "2024-07-31",
+#         # }
+#         # loader_settings = {
+#         #     "format": "PDF",
+#         #     "source": "DEVICE",
+#         #     "path": [".data"],
+#         #     "strategy": "SUMMARY",
+#         # }
+#         # await load_documents_to_vectorstore(session, user_id, loader_settings=loader_settings)
+#         # await user_query_to_graph_db(session, user_id, "I walked in the forest yesterday and added to my list I need to buy some milk in the store and get a summary from a classical book i read yesterday")
+#         # await add_documents_to_graph_db(session, user_id, loader_settings=loader_settings)
+#         # await user_context_enrichment(session, user_id, query="Tell me about the book I read yesterday")
+#
+#
+# if __name__ == "__main__":
+#     import asyncio
+#
+#     asyncio.run(main())
diff --git a/level_4/cognitive_architecture/vectorstore_manager.py b/cognitive_architecture/vectorstore_manager.py
similarity index 98%
rename from level_4/cognitive_architecture/vectorstore_manager.py
rename to cognitive_architecture/vectorstore_manager.py
index 89a66fc4c..46dfd5e77 100644
--- a/level_4/cognitive_architecture/vectorstore_manager.py
+++ b/cognitive_architecture/vectorstore_manager.py
@@ -170,7 +170,7 @@ def
load_environment_variables(self) -> None: self.OPENAI_API_KEY = config.openai_key @classmethod - async def create_memory(cls, user_id: str, session, job_id, memory_label:str, **kwargs): + async def create_memory(cls, user_id: str, session, job_id:str=None, memory_label:str=None, **kwargs): """ Class method that acts as a factory method for creating Memory instances. It performs necessary DB checks or updates before instance creation. @@ -251,7 +251,7 @@ async def handle_new_user(user_id: str, session): return f"Error creating user: {str(e)}" @staticmethod - async def handle_new_memory(user_id: str, session, job_id: str = None, memory_name: str = None): + async def handle_new_memory(user_id: str, session, job_id: str = None, memory_name: str = None, memory_category:str='PUBLIC'): """ Handle new memory creation associated with a user. @@ -275,6 +275,7 @@ async def handle_new_memory(user_id: str, session, job_id: str = None, memory_na user_id=user_id, operation_id=job_id, memory_name=memory_name, + memory_category=memory_category, methods_list=str(["Memory", "SemanticMemory", "EpisodicMemory"]), attributes_list=str( [ diff --git a/level_4/docker-compose.yml b/docker-compose.yml similarity index 100% rename from level_4/docker-compose.yml rename to docker-compose.yml diff --git a/level_4/entrypoint.sh b/entrypoint.sh similarity index 100% rename from level_4/entrypoint.sh rename to entrypoint.sh diff --git a/level_4/.data/BartlebyTheScrivener.pdf b/level_4/.data/BartlebyTheScrivener.pdf deleted file mode 100644 index 1a38dd3de..000000000 Binary files a/level_4/.data/BartlebyTheScrivener.pdf and /dev/null differ diff --git a/level_4/LICENSE b/level_4/LICENSE deleted file mode 100644 index 261eeb9e9..000000000 --- a/level_4/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/level_4/api.py b/level_4/api.py deleted file mode 100644 index 051507c98..000000000 --- a/level_4/api.py +++ /dev/null @@ -1,179 +0,0 @@ -import json -import logging -import os -from enum import Enum -from typing import Dict, Any - -import uvicorn -from fastapi import FastAPI, BackgroundTasks, HTTPException -from fastapi.responses import JSONResponse -from pydantic import BaseModel - -from cognitive_architecture.database.postgres.database import AsyncSessionLocal -from cognitive_architecture.database.postgres.database_crud import session_scope -from cognitive_architecture.vectorstore_manager import Memory -from dotenv import load_dotenv -from main import add_documents_to_graph_db, user_context_enrichment -from cognitive_architecture.config import Config - -# Set up logging -logging.basicConfig( - level=logging.INFO, # Set the logging level (e.g., DEBUG, INFO, WARNING, ERROR, CRITICAL) - format="%(asctime)s [%(levelname)s] %(message)s", # Set the log message format -) - -logger = logging.getLogger(__name__) - - -load_dotenv() -OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "") -app = FastAPI(debug=True) -# -# from auth.cognito.JWTBearer import JWTBearer -# from auth.auth import jwks -# -# auth = JWTBearer(jwks) - -from fastapi import Depends - - -config = Config() -config.load() - -class ImageResponse(BaseModel): - success: bool - message: str - - -@app.get( - "/", -) -async def root(): - """ - Root endpoint that returns a welcome message. - """ - return {"message": "Hello, World, I am alive!"} - - -@app.get("/health") -def health_check(): - """ - Health check endpoint that returns the server status. - """ - return {"status": "OK"} - - - - -class Payload(BaseModel): - payload: Dict[str, Any] - -@app.post("/add-memory", response_model=dict) -async def add_memory( - payload: Payload, - # files: List[UploadFile] = File(...), -): - try: - logging.info(" Adding to Memory ") - decoded_payload = payload.payload - async with session_scope(session=AsyncSessionLocal()) as session: - from main import load_documents_to_vectorstore - - if 'settings' in decoded_payload and decoded_payload['settings'] is not None: - settings_for_loader = decoded_payload['settings'] - else: - settings_for_loader = None - - if 'content' in decoded_payload and decoded_payload['content'] is not None: - content = decoded_payload['content'] - else: - content = None - - output = await load_documents_to_vectorstore(session, decoded_payload['user_id'], content=content, loader_settings=settings_for_loader) - return JSONResponse(content={"response": output}, status_code=200) - - except Exception as e: - return JSONResponse( - content={"response": {"error": str(e)}}, status_code=503 - ) - -@app.post("/user-query-to-graph") -async def user_query_to_graph(payload: Payload): - try: - from main import user_query_to_graph_db - decoded_payload = payload.payload - # Execute the query - replace this with the actual execution method - async with session_scope(session=AsyncSessionLocal()) as session: - # Assuming you have a method in Neo4jGraphDB to execute the query - result = await user_query_to_graph_db(session= session, user_id= decoded_payload['user_id'],query_input =decoded_payload['query']) - - return result - - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) - - -@app.post("/document-to-graph-db") -async def document_to_graph_db(payload: Payload): - logging.info("Adding documents to graph db") - try: - decoded_payload = payload.payload - if 'settings' in decoded_payload and 
decoded_payload['settings'] is not None: - settings_for_loader = decoded_payload['settings'] - else: - settings_for_loader = None - async with session_scope(session=AsyncSessionLocal()) as session: - result = await add_documents_to_graph_db(session =session, user_id = decoded_payload['user_id'], loader_settings =settings_for_loader) - return result - - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) - -@app.post("/user-query-processor") -async def user_query_processor(payload: Payload): - try: - decoded_payload = payload.payload - - # Execute the query - replace this with the actual execution method - async with session_scope(session=AsyncSessionLocal()) as session: - # Assuming you have a method in Neo4jGraphDB to execute the query - result = await user_context_enrichment(session, decoded_payload['user_id'], decoded_payload['query']) - return JSONResponse(content={"response": result}, status_code=200) - - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) - - -@app.post("/user-query-classifier") -async def user_query_classfier(payload: Payload): - try: - decoded_payload = payload.payload - - # Execute the query - replace this with the actual execution method - async with session_scope(session=AsyncSessionLocal()) as session: - from cognitive_architecture.classifiers.classifier import classify_user_query - # Assuming you have a method in Neo4jGraphDB to execute the query - result = await classify_user_query(session, decoded_payload['user_id'], decoded_payload['query']) - return JSONResponse(content={"response": result}, status_code=200) - - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) - -def start_api_server(host: str = "0.0.0.0", port: int = 8000): - """ - Start the API server using uvicorn. - - Parameters: - host (str): The host for the server. - port (int): The port for the server. - """ - try: - logger.info(f"Starting server at {host}:{port}") - uvicorn.run(app, host=host, port=port) - except Exception as e: - logger.exception(f"Failed to start server: {e}") - # Here you could add any cleanup code or error recovery code. - - -if __name__ == "__main__": - start_api_server() diff --git a/level_4/bin/dockerize b/level_4/bin/dockerize deleted file mode 100755 index 30cfede3c..000000000 --- a/level_4/bin/dockerize +++ /dev/null @@ -1,36 +0,0 @@ -set -euo pipefail - -AWS_REGION=${region:-eu-west-1} -AWS_DEPLOYMENT_ACCOUNT=${account:-463722570299} -AWS_REPOSITORY=${repo:-"${AWS_DEPLOYMENT_ACCOUNT}.dkr.ecr.${AWS_REGION}.amazonaws.com"} - -STAGE=${stage:-"dev"} -SHA_SHORT="$(git rev-parse --short HEAD)" -CUR_DATE="$(date +%Y%m%d%H%M%S)" -VERSION="$STAGE-$CUR_DATE-$SHA_SHORT" -IMAGE_NAME=${image_name:-promethai-${STAGE}-promethai-backend} - -REPO_NAME="${AWS_REPOSITORY}/${IMAGE_NAME}" -FULL_IMAGE_NAME="${REPO_NAME}:${VERSION}" -APP_DIR=${app_dir:-"."} - -PUBLISH=${publish:-false} - -echo "Building docker image ${FULL_IMAGE_NAME} located in dir ${app_dir}" - -pushd "${APP_DIR}" && - docker buildx build --platform linux/amd64 \ - --build-arg STAGE=${STAGE} \ - -t "${FULL_IMAGE_NAME}" . 
&& - echo "${VERSION}" >/tmp/.DOCKER_IMAGE_VERSION && - echo "Successfully built docker image ${FULL_IMAGE_NAME}" - -if [ "${PUBLISH}" = true ]; then - echo "Pushing docker image ${FULL_IMAGE_NAME} to ECR repository to AWS account ${AWS_DEPLOYMENT_ACCOUNT}" - if [ "${PUBLISH}" = true ]; then - echo "logging in" - aws ecr get-login-password --region "${AWS_REGION}" | docker login --username AWS --password-stdin "${AWS_REPOSITORY}" - fi - docker push "${FULL_IMAGE_NAME}" && - echo "Successfully pushed docker image ${FULL_IMAGE_NAME} to ECR repository" -fi \ No newline at end of file diff --git a/level_4/cognitive_architecture/database/graph_database/graph.py b/level_4/cognitive_architecture/database/graph_database/graph.py deleted file mode 100644 index 3b36e367b..000000000 --- a/level_4/cognitive_architecture/database/graph_database/graph.py +++ /dev/null @@ -1,682 +0,0 @@ -# import marvin -# from pydantic_settings import BaseSettings -# from marvin import ai_classifier -# marvin.settings.openai.api_key = os.environ.get("OPENAI_API_KEY") -import logging -import os - -print(os.getcwd()) - -import networkx as nx - -from langchain.graphs import Neo4jGraph -import os -from dotenv import load_dotenv - -import openai -import instructor -from openai import OpenAI -from openai import AsyncOpenAI -import pickle - -from abc import ABC, abstractmethod - -# Adds response_model to ChatCompletion -# Allows the return of Pydantic model rather than raw JSON - -from pydantic import BaseModel, Field -from typing import List -from ...utils import format_dict, append_uuid_to_variable_names, create_edge_variable_mapping, create_node_variable_mapping -DEFAULT_PRESET = "promethai_chat" -preset_options = [DEFAULT_PRESET] -PROMETHAI_DIR = os.path.join(os.path.expanduser("~"), ".") -load_dotenv() - -OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "") -from ...config import Config - -config = Config() -config.load() - -print(config.model) -print(config.openai_key) - -OPENAI_API_KEY = config.openai_key - -aclient = instructor.patch(OpenAI()) - - -#Execute Cypher queries to create the user and memory components if they don't exist -# -# graph.query( -# f""" -# // Ensure the User node exists -# MERGE (user:User {{ userId: {user} }}) -# -# // Ensure the SemanticMemory node exists -# MERGE (semantic:SemanticMemory {{ userId: {user} }}) -# MERGE (user)-[:HAS_SEMANTIC_MEMORY]->(semantic) -# -# // Ensure the EpisodicMemory node exists -# MERGE (episodic:EpisodicMemory {{ userId: {user} }}) -# MERGE (user)-[:HAS_EPISODIC_MEMORY]->(episodic) -# -# // Ensure the Buffer node exists -# MERGE (buffer:Buffer {{ userId: {user} }}) -# MERGE (user)-[:HAS_BUFFER]->(buffer) -# """ -# ) -# -# # Execute Cypher queries to create the cognitive components in the graph -# graph.query( -# f""" -# // Parsing the query into components and linking them to the user and memory components -# MERGE (user:User {{ userId: {user} }}) -# MERGE (semantic:SemanticMemory {{ userId: {user} }}) -# MERGE (episodic:EpisodicMemory {{ userId: {user} }}) -# MERGE (buffer:Buffer {{ userId: {user} }}) -# - # CREATE (action1:Event {{ description: 'take a walk', location: 'forest' }}) - # CREATE (action2:Event {{ description: 'get information', source: 'book' }}) - # CREATE (time:TimeContext {{ description: 'in the afternoon' }}) - # - # WITH user, semantic, episodic, buffer, action1, action2, time - # CREATE (knowledge:Knowledge {{ content: 'information from a book' }}) - # CREATE (semantic)-[:HAS_KNOWLEDGE]->(knowledge) - # CREATE (episodic)-[:HAS_EVENT]->(action1) - # 
CREATE (episodic)-[:HAS_EVENT]->(action2) - # CREATE (episodic)-[:HAS_TIME_CONTEXT]->(time) - # CREATE (buffer)-[:CURRENTLY_HOLDING]->(action1) - # CREATE (buffer)-[:CURRENTLY_HOLDING]->(action2) - # CREATE (buffer)-[:CURRENTLY_HOLDING]->(time) -# """ -# ) - - -class Node(BaseModel): - id: int - description: str - category: str - color: str ="blue" - memory_type: str - - - -class Edge(BaseModel): - source: int - target: int - description: str - color: str= "blue" - - -class KnowledgeGraph(BaseModel): - nodes: List[Node] = Field(..., default_factory=list) - edges: List[Edge] = Field(..., default_factory=list) - - -# - -def generate_graph(input) -> KnowledgeGraph: - out = aclient.chat.completions.create( - model="gpt-4-1106-preview", - messages=[ - { - "role": "user", - "content": f"""Use the given format to extract information from the following input: {input}. """, - - }, - { "role":"system", "content": """You are a top-tier algorithm - designed for extracting information in structured formats to build a knowledge graph. - - **Nodes** represent entities and concepts. They're akin to Wikipedia nodes. - - The aim is to achieve simplicity and clarity in the - knowledge graph, making it accessible for a vast audience. - ## 2. Labeling Nodes - - **Consistency**: Ensure you use basic or elementary types for node labels. - - For example, when you identify an entity representing a person, - always label it as **"person"**. - Avoid using more specific terms like "mathematician" or "scientist". - - Include event, entity, time, or action nodes to the category. - - Classify the memory type as episodic or semantic. - - **Node IDs**: Never utilize integers as node IDs. - Node IDs should be names or human-readable identifiers found in the text. - ## 3. Handling Numerical Data and Dates - - Numerical data, like age or other related information, - should be incorporated as attributes or properties of the respective nodes. - - **No Separate Nodes for Dates/Numbers**: - Do not create separate nodes for dates or numerical values. - Always attach them as attributes or properties of nodes. - - **Property Format**: Properties must be in a key-value format. - - **Quotation Marks**: Never use escaped single or double quotes within property values. - - **Naming Convention**: Use camelCase for property keys, e.g., `birthDate`. - ## 4. Coreference Resolution - - **Maintain Entity Consistency**: - When extracting entities, it's vital to ensure consistency. - If an entity, such as "John Doe", is mentioned multiple times - in the text but is referred to by different names or pronouns (e.g., "Joe", "he"), - always use the most complete identifier for that entity throughout the knowledge graph. - In this example, use "John Doe" as the entity ID. - Remember, the knowledge graph should be coherent and easily understandable, - so maintaining consistency in entity references is crucial. - ## 5. Strict Compliance - Adhere to the rules strictly. 
Non-compliance will result in termination."""} - ], - response_model=KnowledgeGraph, - ) - return out - -class AbstractGraphDB(ABC): - - @abstractmethod - def query(self, query: str, params=None): - pass - - # @abstractmethod - # def create_nodes(self, nodes: List[dict]): - # pass - # - # @abstractmethod - # def create_edges(self, edges: List[dict]): - # pass - # - # @abstractmethod - # def create_memory_type_relationships(self, nodes: List[dict], memory_type: str): - # pass - - -class Neo4jGraphDB(AbstractGraphDB): - def __init__(self, url, username, password): - self.graph = Neo4jGraph(url=url, username=username, password=password) - self.openai_key = config.openai_key - - - - def query(self, query, params=None): - return self.graph.query(query, params) - - - - def create_base_cognitive_architecture(self, user_id: str): - # Create the user and memory components if they don't exist - user_memory_cypher = f""" - MERGE (user:User {{userId: '{user_id}'}}) - MERGE (semantic:SemanticMemory {{description: 'SemanticMemory', userId: '{user_id}' }}) - MERGE (episodic:EpisodicMemory {{description: 'EpisodicMemory' , userId: '{user_id}'}}) - MERGE (buffer:Buffer {{description: 'Buffer' , userId: '{user_id}' }}) - MERGE (user)-[:HAS_SEMANTIC_MEMORY]->(semantic) - MERGE (user)-[:HAS_EPISODIC_MEMORY]->(episodic) - MERGE (user)-[:HAS_BUFFER]->(buffer) - """ - return user_memory_cypher - - def user_query_to_edges_and_nodes(self, input: str) ->KnowledgeGraph: - return openai.ChatCompletion.create( - model=config.model, - messages=[ - { - "role": "user", - "content": f"""Use the given format to extract information from the following input: {input}. """, - - }, - {"role": "system", "content": """You are a top-tier algorithm - designed for extracting information in structured formats to build a knowledge graph. - - **Nodes** represent entities and concepts. They're akin to Wikipedia nodes. - - The aim is to achieve simplicity and clarity in the - knowledge graph, making it accessible for a vast audience. - ## 2. Labeling Nodes - - **Consistency**: Ensure you use basic or elementary types for node labels. - - For example, when you identify an entity representing a person, - always label it as **"person"**. - Avoid using more specific terms like "mathematician" or "scientist". - - Include event, entity, time, or action nodes to the category. - - Classify the memory type as episodic or semantic. - - **Node IDs**: Never utilize integers as node IDs. - Node IDs should be names or human-readable identifiers found in the text. - ## 3. Handling Numerical Data and Dates - - Numerical data, like age or other related information, - should be incorporated as attributes or properties of the respective nodes. - - **No Separate Nodes for Dates/Numbers**: - Do not create separate nodes for dates or numerical values. - Always attach them as attributes or properties of nodes. - - **Property Format**: Properties must be in a key-value format. - - **Quotation Marks**: Never use escaped single or double quotes within property values. - - **Naming Convention**: Use camelCase for property keys, e.g., `birthDate`. - ## 4. Coreference Resolution - - **Maintain Entity Consistency**: - When extracting entities, it's vital to ensure consistency. - If an entity, such as "John Doe", is mentioned multiple times - in the text but is referred to by different names or pronouns (e.g., "Joe", "he"), - always use the most complete identifier for that entity throughout the knowledge graph. - In this example, use "John Doe" as the entity ID. 
- Remember, the knowledge graph should be coherent and easily understandable, - so maintaining consistency in entity references is crucial. - ## 5. Strict Compliance - Adhere to the rules strictly. Non-compliance will result in termination."""} - ], - response_model=KnowledgeGraph, - ) - - def generate_create_statements_for_nodes_with_uuid(self, nodes, unique_mapping, base_node_mapping): - create_statements = [] - for node in nodes: - original_variable_name = base_node_mapping[node['id']] - unique_variable_name = unique_mapping[original_variable_name] - node_label = node['category'].capitalize() - properties = {k: v for k, v in node.items() if k not in ['id', 'category']} - try: - properties = format_dict(properties) - except: - pass - create_statements.append(f"CREATE ({unique_variable_name}:{node_label} {properties})") - return create_statements - - # Update the function to generate Cypher CREATE statements for edges with unique variable names - def generate_create_statements_for_edges_with_uuid(self, user_id, edges, unique_mapping, base_node_mapping): - create_statements = [] - with_statement = f"WITH {', '.join(unique_mapping.values())}, user , semantic, episodic, buffer" - create_statements.append(with_statement) - - for edge in edges: - # print("HERE IS THE EDGE", edge) - source_variable = unique_mapping[base_node_mapping[edge['source']]] - target_variable = unique_mapping[base_node_mapping[edge['target']]] - relationship = edge['description'].replace(" ", "_").upper() - create_statements.append(f"CREATE ({source_variable})-[:{relationship}]->({target_variable})") - return create_statements - - def generate_memory_type_relationships_with_uuid_and_time_context(self, user_id, nodes, unique_mapping, base_node_mapping): - create_statements = [] - with_statement = f"WITH {', '.join(unique_mapping.values())}, user, semantic, episodic, buffer" - create_statements.append(with_statement) - - # Loop through each node and create relationships based on memory_type - for node in nodes: - original_variable_name = base_node_mapping[node['id']] - unique_variable_name = unique_mapping[original_variable_name] - if node['memory_type'] == 'semantic': - create_statements.append(f"CREATE (semantic)-[:HAS_KNOWLEDGE]->({unique_variable_name})") - elif node['memory_type'] == 'episodic': - create_statements.append(f"CREATE (episodic)-[:HAS_EVENT]->({unique_variable_name})") - if node['category'] == 'time': - create_statements.append(f"CREATE (buffer)-[:HAS_TIME_CONTEXT]->({unique_variable_name})") - - # Assuming buffer holds all actions and times - # if node['category'] in ['action', 'time']: - create_statements.append(f"CREATE (buffer)-[:CURRENTLY_HOLDING]->({unique_variable_name})") - - return create_statements - - async def generate_cypher_query_for_user_prompt_decomposition(self, user_id:str, query:str): - - graph: KnowledgeGraph = generate_graph(query) - graph_dic = graph.dict() - - node_variable_mapping = create_node_variable_mapping(graph_dic['nodes']) - edge_variable_mapping = create_edge_variable_mapping(graph_dic['edges']) - # Create unique variable names for each node - unique_node_variable_mapping = append_uuid_to_variable_names(node_variable_mapping) - unique_edge_variable_mapping = append_uuid_to_variable_names(edge_variable_mapping) - create_nodes_statements = self.generate_create_statements_for_nodes_with_uuid(graph_dic['nodes'], unique_node_variable_mapping, node_variable_mapping) - create_edges_statements =self.generate_create_statements_for_edges_with_uuid(user_id, graph_dic['edges'], 
unique_node_variable_mapping, node_variable_mapping) - - memory_type_statements_with_uuid_and_time_context = self.generate_memory_type_relationships_with_uuid_and_time_context(user_id, - graph_dic['nodes'], unique_node_variable_mapping, node_variable_mapping) - - # # Combine all statements - cypher_statements = [self.create_base_cognitive_architecture(user_id)] + create_nodes_statements + create_edges_statements + memory_type_statements_with_uuid_and_time_context - cypher_statements_joined = "\n".join(cypher_statements) - return cypher_statements_joined - - - def update_user_query_for_user_prompt_decomposition(self, user_id, user_query): - pass - - - def delete_all_user_memories(self, user_id): - try: - # Check if the user exists - user_exists = self.graph.query(f"MATCH (user:User {{userId: '{user_id}'}}) RETURN user") - if not user_exists: - return f"No user found with ID: {user_id}" - - # Delete all memory nodes and relationships for the given user - delete_query = f""" - MATCH (user:User {{userId: '{user_id}'}})-[r]-() - DELETE r - WITH user - MATCH (user)-[:HAS_SEMANTIC_MEMORY]->(semantic) - MATCH (user)-[:HAS_EPISODIC_MEMORY]->(episodic) - MATCH (user)-[:HAS_BUFFER]->(buffer) - DETACH DELETE semantic, episodic, buffer - """ - self.graph.query(delete_query) - return f"All memories deleted for user ID: {user_id}" - except Exception as e: - return f"An error occurred: {str(e)}" - - def delete_specific_memory_type(self, user_id, memory_type): - try: - # Check if the user exists - user_exists = self.graph.query(f"MATCH (user:User {{userId: '{user_id}'}}) RETURN user") - if not user_exists: - return f"No user found with ID: {user_id}" - - # Validate memory type - if memory_type not in ['SemanticMemory', 'EpisodicMemory', 'Buffer']: - return "Invalid memory type. Choose from 'SemanticMemory', 'EpisodicMemory', or 'Buffer'." 
- - # Delete specific memory type nodes and relationships for the given user - delete_query = f""" - MATCH (user:User {{userId: '{user_id}'}})-[:HAS_{memory_type.upper()}]->(memory) - DETACH DELETE memory - """ - self.graph.query(delete_query) - return f"{memory_type} deleted for user ID: {user_id}" - except Exception as e: - return f"An error occurred: {str(e)}" - def retrieve_semantic_memory(self, user_id: str): - query = f""" - MATCH (user:User {{userId: '{user_id}' }})-[:HAS_SEMANTIC_MEMORY]->(semantic:SemanticMemory) - MATCH (semantic)-[:HAS_KNOWLEDGE]->(knowledge) - RETURN knowledge - """ - return self.query(query, params={"user_id": user_id}) - - def retrieve_episodic_memory(self, user_id: str): - query = """ - MATCH (user:User {userId: $user_id})-[:HAS_EPISODIC_MEMORY]->(episodic:EpisodicMemory) - MATCH (episodic)-[:HAS_EVENT]->(event) - RETURN event - """ - return self.query(query, params={"user_id": user_id}) - - def retrieve_buffer_memory(self, user_id: str): - query = """ - MATCH (user:User {userId: $user_id})-[:HAS_BUFFER]->(buffer:Buffer) - MATCH (buffer)-[:CURRENTLY_HOLDING]->(item) - RETURN item - """ - return self.query(query, params={"user_id": user_id}) - def generate_graph_semantic_memory_document_summary(self, document_summary : str, unique_graphdb_mapping_values: dict, document_namespace: str): - """ This function takes a document and generates a document summary in Semantic Memory""" - create_statements = [] - with_statement = f"WITH {', '.join(unique_graphdb_mapping_values.values())}, user, semantic, episodic, buffer" - create_statements.append(with_statement) - - # Loop through each node and create relationships based on memory_type - - create_statements.append(f"CREATE (semantic)-[:HAS_KNOWLEDGE]->({unique_graphdb_mapping_values})") - - - return create_statements - - - def generate_document_summary(self, document_summary : str, unique_graphdb_mapping_values: dict, document_namespace: str): - """ This function takes a document and generates a document summary in Semantic Memory""" - - - # fetch namespace from postgres db - # fetch 1st and last page from vector store - # summarize the text, add document type - # write to postgres - create_statements = [] - with_statement = f"WITH {', '.join(unique_graphdb_mapping_values.values())}, user, semantic, episodic, buffer" - create_statements.append(with_statement) - - # Loop through each node and create relationships based on memory_type - - create_statements.append(f"CREATE (semantic)-[:HAS_KNOWLEDGE]->({unique_graphdb_mapping_values})") - - - return create_statements - - async def get_document_categories(self, user_id: str): - """ - Retrieve a list of categories for all documents associated with a given user. - - This function executes a Cypher query in a Neo4j database to fetch the categories - of all 'Document' nodes that are linked to the 'SemanticMemory' node of the specified user. - - Parameters: - - session (AsyncSession): The database session for executing the query. - - user_id (str): The unique identifier of the user. - - Returns: - - List[str]: A list of document categories associated with the user. - - Raises: - - Exception: If an error occurs during the database query execution. 
- """ - try: - query = f''' - MATCH (user:User {{userId: '{user_id}' }})-[:HAS_SEMANTIC_MEMORY]->(semantic:SemanticMemory)-[:HAS_DOCUMENT]->(document:Document) - RETURN document.documentCategory AS category - ''' - logging.info(f"Generated Cypher query: {query}") - return query - - except Exception as e: - logging.error(f"An error occurred while retrieving document categories: {str(e)}") - return None - - async def get_document_ids(self, user_id: str, category: str): - """ - Retrieve a list of document IDs for a specific category associated with a given user. - - This function executes a Cypher query in a Neo4j database to fetch the IDs - of all 'Document' nodes in a specific category that are linked to the 'SemanticMemory' node of the specified user. - - Parameters: - - user_id (str): The unique identifier of the user. - - category (str): The specific document category to filter by. - - Returns: - - List[str]: A list of document IDs in the specified category associated with the user. - - Raises: - - Exception: If an error occurs during the database query execution. - """ - try: - query = f''' - MATCH (user:User {{userId: '{user_id}' }})-[:HAS_SEMANTIC_MEMORY]->(semantic:SemanticMemory)-[:HAS_DOCUMENT]->(document:Document {{documentCategory: '{category}'}}) - RETURN document.d_id AS d_id - ''' - logging.info(f"Generated Cypher query: {query}") - return query - - except Exception as e: - logging.error(f"An error occurred while retrieving document IDs: {str(e)}") - return None - - def create_document_node_cypher(self, document_summary: dict, user_id: str) -> str: - """ - Generate a Cypher query to create a Document node linked to a SemanticMemory node for a user. - - Parameters: - - document_summary (dict): A dictionary containing the document's category, title, and summary. - - user_id (str): The unique identifier for the user. - - Returns: - - str: A Cypher query string with parameters. - - Raises: - - ValueError: If any required data is missing or invalid. 
- """ - - # Validate the input parameters - if not isinstance(document_summary, dict): - raise ValueError("The document_summary must be a dictionary.") - if not all(key in document_summary for key in ['DocumentCategory', 'Title', 'Summary', 'd_id']): - raise ValueError("The document_summary dictionary is missing required keys.") - if not isinstance(user_id, str) or not user_id: - raise ValueError("The user_id must be a non-empty string.") - - # Escape single quotes in the document summary data (if not using parameters) - title = document_summary['Title'].replace("'", "\\'") - summary = document_summary['Summary'].replace("'", "\\'") - document_category = document_summary['DocumentCategory'].replace("'", "\\'") - d_id = document_summary['d_id'].replace("'", "\\'") - - # Generate the Cypher query using parameters - cypher_query = f''' - // Ensure the User node exists - MERGE (user:User {{ userId: '{user_id}' }}) - - // Ensure the SemanticMemory node exists and is connected to the User - MERGE (semantic:SemanticMemory {{ userId: '{user_id}' }}) - MERGE (user)-[:HAS_SEMANTIC_MEMORY]->(semantic) - - // Create the Document node with its properties - CREATE (document:Document {{ - title: '{title}', - summary: '{summary}', - documentCategory: '{document_category}', - d_id: '{d_id}' - }}) - - // Link the Document node to the SemanticMemory node - CREATE (semantic)-[:HAS_DOCUMENT]->(document) - ''' - logging.info(f"Generated Cypher query: {cypher_query}") - - return cypher_query - - def update_document_node_with_namespace(self, user_id: str, vectordb_namespace: str, document_id: str): - # Generate the Cypher query - cypher_query = f''' - MATCH (user:User {{userId: '{user_id}' }})-[:HAS_SEMANTIC_MEMORY]->(semantic:SemanticMemory)-[:HAS_DOCUMENT]->(document:Document {{d_id: '{document_id}'}}) - SET document.vectordbNamespace = '{vectordb_namespace}' - RETURN document - ''' - - - return cypher_query - - def get_namespaces_by_document_category(self, user_id: str, category: str): - """ - Retrieve a list of Vectordb namespaces for documents of a specified category associated with a given user. - - This function executes a Cypher query in a Neo4j database to fetch the 'vectordbNamespace' of all 'Document' nodes - that are linked to the 'SemanticMemory' node of the specified user and belong to the specified category. - - Parameters: - - user_id (str): The unique identifier of the user. - - category (str): The category to filter the documents by. - - Returns: - - List[str]: A list of Vectordb namespaces for documents in the specified category. - - Raises: - - Exception: If an error occurs during the database query execution. 
- """ - try: - query = f''' - MATCH (user:User {{userId: '{user_id}'}})-[:HAS_SEMANTIC_MEMORY]->(semantic:SemanticMemory)-[:HAS_DOCUMENT]->(document:Document) - WHERE document.documentCategory = '{category}' - RETURN document.vectordbNamespace AS namespace - ''' - result = self.query(query) - namespaces = [record["namespace"] for record in result] - return namespaces - except Exception as e: - logging.error(f"An error occurred while retrieving namespaces by document category: {str(e)}") - return None - - -class NetworkXGraphDB: - def __init__(self, filename='networkx_graph.pkl'): - self.filename = filename - try: - self.graph = self.load_graph() # Attempt to load an existing graph - except (FileNotFoundError, EOFError, pickle.UnpicklingError): - self.graph = nx.Graph() # Create a new graph if loading failed - - def save_graph(self): - """ Save the graph to a file using pickle """ - with open(self.filename, 'wb') as f: - pickle.dump(self.graph, f) - - def load_graph(self): - """ Load the graph from a file using pickle """ - with open(self.filename, 'rb') as f: - return pickle.load(f) - - def create_base_cognitive_architecture(self, user_id: str): - # Add nodes for user and memory types if they don't exist - self.graph.add_node(user_id, type='User') - self.graph.add_node(f"{user_id}_semantic", type='SemanticMemory') - self.graph.add_node(f"{user_id}_episodic", type='EpisodicMemory') - self.graph.add_node(f"{user_id}_buffer", type='Buffer') - - # Add edges to connect user to memory types - self.graph.add_edge(user_id, f"{user_id}_semantic", relation='HAS_SEMANTIC_MEMORY') - self.graph.add_edge(user_id, f"{user_id}_episodic", relation='HAS_EPISODIC_MEMORY') - self.graph.add_edge(user_id, f"{user_id}_buffer", relation='HAS_BUFFER') - - self.save_graph() # Save the graph after modifying it - - def delete_all_user_memories(self, user_id: str): - # Remove nodes and edges related to the user's memories - for memory_type in ['semantic', 'episodic', 'buffer']: - memory_node = f"{user_id}_{memory_type}" - self.graph.remove_node(memory_node) - - self.save_graph() # Save the graph after modifying it - - def delete_specific_memory_type(self, user_id: str, memory_type: str): - # Remove a specific type of memory node and its related edges - memory_node = f"{user_id}_{memory_type.lower()}" - if memory_node in self.graph: - self.graph.remove_node(memory_node) - - self.save_graph() # Save the graph after modifying it - - def retrieve_semantic_memory(self, user_id: str): - return [n for n in self.graph.neighbors(f"{user_id}_semantic")] - - def retrieve_episodic_memory(self, user_id: str): - return [n for n in self.graph.neighbors(f"{user_id}_episodic")] - - def retrieve_buffer_memory(self, user_id: str): - return [n for n in self.graph.neighbors(f"{user_id}_buffer")] - - def generate_graph_semantic_memory_document_summary(self, document_summary, unique_graphdb_mapping_values, document_namespace, user_id): - for node, attributes in unique_graphdb_mapping_values.items(): - self.graph.add_node(node, **attributes) - self.graph.add_edge(f"{user_id}_semantic", node, relation='HAS_KNOWLEDGE') - self.save_graph() - - def generate_document_summary(self, document_summary, unique_graphdb_mapping_values, document_namespace, user_id): - self.generate_graph_semantic_memory_document_summary(document_summary, unique_graphdb_mapping_values, document_namespace, user_id) - - async def get_document_categories(self, user_id): - return [self.graph.nodes[n]['category'] for n in self.graph.neighbors(f"{user_id}_semantic") if 
'category' in self.graph.nodes[n]] - - async def get_document_ids(self, user_id, category): - return [n for n in self.graph.neighbors(f"{user_id}_semantic") if self.graph.nodes[n].get('category') == category] - - def create_document_node(self, document_summary, user_id): - d_id = document_summary['d_id'] - self.graph.add_node(d_id, **document_summary) - self.graph.add_edge(f"{user_id}_semantic", d_id, relation='HAS_DOCUMENT') - self.save_graph() - - def update_document_node_with_namespace(self, user_id, vectordb_namespace, document_id): - if self.graph.has_node(document_id): - self.graph.nodes[document_id]['vectordbNamespace'] = vectordb_namespace - self.save_graph() - - def get_namespaces_by_document_category(self, user_id, category): - return [self.graph.nodes[n].get('vectordbNamespace') for n in self.graph.neighbors(f"{user_id}_semantic") if self.graph.nodes[n].get('category') == category] - -class GraphDBFactory: - def create_graph_db(self, db_type, **kwargs): - if db_type == 'neo4j': - return Neo4jGraphDB(**kwargs) - elif db_type == 'networkx': - return NetworkXGraphDB(**kwargs) - else: - raise ValueError(f"Unsupported database type: {db_type}") - - - - diff --git a/level_4/cognitive_architecture/database/vectordb/loaders/loaders.py b/level_4/cognitive_architecture/database/vectordb/loaders/loaders.py deleted file mode 100644 index a64efb18d..000000000 --- a/level_4/cognitive_architecture/database/vectordb/loaders/loaders.py +++ /dev/null @@ -1,83 +0,0 @@ -from io import BytesIO -import fitz -import os -import sys - -from cognitive_architecture.database.vectordb.chunkers.chunkers import chunk_data - -from langchain.document_loaders import UnstructuredURLLoader -from langchain.document_loaders import DirectoryLoader -import logging -import os -from langchain.document_loaders import TextLoader -import requests -async def _document_loader( observation: str, loader_settings: dict): - - document_format = loader_settings.get("format", "text") - loader_strategy = loader_settings.get("strategy", "VANILLA") - chunk_size = loader_settings.get("chunk_size", 500) - chunk_overlap = loader_settings.get("chunk_overlap", 20) - - - logging.info("LOADER SETTINGS %s", loader_settings) - - list_of_docs = loader_settings["path"] - chunked_doc = [] - - if loader_settings.get("source") == "URL": - for file in list_of_docs: - if document_format == "PDF": - logging.info("File is %s", file) - pdf_response = requests.get(file) - pdf_stream = BytesIO(pdf_response.content) - with fitz.open(stream=pdf_stream, filetype='pdf') as doc: - file_content = "" - for page in doc: - file_content += page.get_text() - pages = chunk_data(chunk_strategy=loader_strategy, source_data=file_content, chunk_size=chunk_size, - chunk_overlap=chunk_overlap) - - chunked_doc.append(pages) - - elif document_format == "TEXT": - loader = UnstructuredURLLoader(urls=file) - file_content = loader.load() - pages = chunk_data(chunk_strategy=loader_strategy, source_data=file_content, chunk_size=chunk_size, - chunk_overlap=chunk_overlap) - chunked_doc.append(pages) - - elif loader_settings.get("source") == "DEVICE": - - current_directory = os.getcwd() - logging.info("Current Directory: %s", current_directory) - - loader = DirectoryLoader(".data", recursive=True) - if document_format == "PDF": - # loader = SimpleDirectoryReader(".data", recursive=True, exclude_hidden=True) - documents = loader.load() - pages = chunk_data(chunk_strategy=loader_strategy, source_data=str(documents), chunk_size=chunk_size, - chunk_overlap=chunk_overlap) - 
logging.info("Documents: %s", documents) - # pages = documents.load_and_split() - chunked_doc.append(pages) - - - elif document_format == "TEXT": - documents = loader.load() - pages = chunk_data(chunk_strategy=loader_strategy, source_data=str(documents), chunk_size=chunk_size, - chunk_overlap=chunk_overlap) - logging.info("Documents: %s", documents) - # pages = documents.load_and_split() - chunked_doc.append(pages) - - else: - raise ValueError(f"Error: ") - return chunked_doc - - - - - - - - diff --git a/level_4/cognitive_architecture/shared/__init__.py b/level_4/cognitive_architecture/shared/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/level_4/main.py b/level_4/main.py deleted file mode 100644 index ac0492f95..000000000 --- a/level_4/main.py +++ /dev/null @@ -1,401 +0,0 @@ - -from pydantic import BaseModel, Field -from cognitive_architecture.database.graph_database.graph import Neo4jGraphDB -from cognitive_architecture.database.postgres.models.memory import MemoryModel -from cognitive_architecture.classifiers.classifier import classify_documents -import os -from dotenv import load_dotenv -from cognitive_architecture.database.postgres.database_crud import session_scope -from cognitive_architecture.database.postgres.database import AsyncSessionLocal -from cognitive_architecture.utils import generate_letter_uuid -import instructor -from openai import OpenAI -from cognitive_architecture.vectorstore_manager import Memory -from cognitive_architecture.database.postgres.database_crud import fetch_job_id -import uuid -from cognitive_architecture.database.postgres.models.sessions import Session -from cognitive_architecture.database.postgres.models.operation import Operation -from cognitive_architecture.database.postgres.database_crud import session_scope, add_entity, update_entity, fetch_job_id -from cognitive_architecture.database.postgres.models.metadatas import MetaDatas -from cognitive_architecture.database.postgres.models.docs import DocsModel -from cognitive_architecture.database.postgres.models.memory import MemoryModel -from cognitive_architecture.database.postgres.models.user import User -from cognitive_architecture.classifiers.classifier import classify_call -aclient = instructor.patch(OpenAI()) -DEFAULT_PRESET = "promethai_chat" -preset_options = [DEFAULT_PRESET] -PROMETHAI_DIR = os.path.join(os.path.expanduser("~"), ".") -load_dotenv() -OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "") -from cognitive_architecture.config import Config -config = Config() -config.load() -from cognitive_architecture.utils import get_document_names -from sqlalchemy.orm import selectinload, joinedload, contains_eager -import logging -from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.future import select -from cognitive_architecture.utils import get_document_names, generate_letter_uuid, get_memory_name_by_doc_id, get_unsumarized_vector_db_namespace, get_vectordb_namespace, get_vectordb_document_name - -async def fetch_document_vectordb_namespace(session: AsyncSession, user_id: str, namespace_id:str): - memory = await Memory.create_memory(user_id, session, namespace=namespace_id, memory_label=namespace_id) - - - # Managing memory attributes - existing_user = await Memory.check_existing_user(user_id, session) - print("here is the existing user", existing_user) - await memory.manage_memory_attributes(existing_user) - print("Namespace id is %s", namespace_id) - await memory.add_dynamic_memory_class(namespace_id.lower(), namespace_id) - - dynamic_memory_class = 
getattr(memory, namespace_id.lower(), None) - - methods_to_add = ["add_memories", "fetch_memories", "delete_memories"] - - if dynamic_memory_class is not None: - for method_name in methods_to_add: - await memory.add_method_to_class(dynamic_memory_class, method_name) - print(f"Memory method {method_name} has been added") - else: - print(f"No attribute named in memory.") - - print("Available memory classes:", await memory.list_memory_classes()) - result = await memory.dynamic_method_call(dynamic_memory_class, 'fetch_memories', - observation="placeholder", search_type="summary") - - return result, namespace_id - -async def load_documents_to_vectorstore(session: AsyncSession, user_id: str, content:str=None, job_id:str=None, loader_settings:dict=None): - namespace_id = str(generate_letter_uuid()) + "_" + "SEMANTICMEMORY" - namespace_class = namespace_id + "_class" - - logging.info("Namespace created with id %s", namespace_id) - try: - new_user = User(id=user_id) - await add_entity(session, new_user) - except: - pass - - if job_id is None: - job_id = str(uuid.uuid4()) - - await add_entity( - session, - Operation( - id=job_id, - user_id=user_id, - operation_status="RUNNING", - operation_type="DATA_LOAD", - ), - ) - memory = await Memory.create_memory(user_id, session, namespace=namespace_id, job_id=job_id, memory_label=namespace_id) - - if content is not None: - document_names = [content[:30]] - if loader_settings is not None: - document_names = get_document_names(loader_settings.get("path", "None")) - for doc in document_names: - await add_entity( - session, - DocsModel( - id=str(uuid.uuid4()), - operation_id=job_id, - graph_summary= False, - doc_name=doc - ) - ) - # Managing memory attributes - existing_user = await Memory.check_existing_user(user_id, session) - print("here is the existing user", existing_user) - await memory.manage_memory_attributes(existing_user) - params = { - "version": "1.0", - "agreement_id": "AG123456", - "privacy_policy": "https://example.com/privacy", - "terms_of_service": "https://example.com/terms", - "format": "json", - "schema_version": "1.1", - "checksum": "a1b2c3d4e5f6", - "owner": "John Doe", - "license": "MIT", - "validity_start": "2023-08-01", - "validity_end": "2024-07-31", - } - print("Namespace id is %s", namespace_id) - await memory.add_dynamic_memory_class(namespace_id.lower(), namespace_id) - - dynamic_memory_class = getattr(memory, namespace_class.lower(), None) - - methods_to_add = ["add_memories", "fetch_memories", "delete_memories"] - - if dynamic_memory_class is not None: - for method_name in methods_to_add: - await memory.add_method_to_class(dynamic_memory_class, method_name) - print(f"Memory method {method_name} has been added") - else: - print(f"No attribute named in memory.") - - print("Available memory classes:", await memory.list_memory_classes()) - result = await memory.dynamic_method_call(dynamic_memory_class, 'add_memories', - observation=content, params=params, loader_settings=loader_settings) - - await update_entity(session, Operation, job_id, "SUCCESS") - # return result, namespace_id - - -async def user_query_to_graph_db(session: AsyncSession, user_id: str, query_input: str): - - try: - new_user = User(id=user_id) - await add_entity(session, new_user) - except: - pass - - job_id = str(uuid.uuid4()) - - await add_entity( - session, - Operation( - id=job_id, - user_id=user_id, - operation_status="RUNNING", - operation_type="USER_QUERY_TO_GRAPH_DB", - ), - ) - - neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, 
username=config.graph_database_username, password=config.graph_database_password) - cypher_query = await neo4j_graph_db.generate_cypher_query_for_user_prompt_decomposition(user_id,query_input) - result = neo4j_graph_db.query(cypher_query) - - await update_entity(session, Operation, job_id, "SUCCESS") - - return result - - - - -async def add_documents_to_graph_db(session: AsyncSession, user_id: str= None, loader_settings:dict=None, stupid_local_testing_flag=False): #clean this up Vasilije, don't be sloppy - """""" - - - # try: - # await update_document_vectordb_namespace(postgres_session, user_id) - memory_names, docs = await get_unsumarized_vector_db_namespace(session, user_id) - logging.info("Memory names are", memory_names) - logging.info("Docs are", docs) - for doc, memory_name in zip(docs, memory_names): - doc_name, doc_id = doc - # if stupid_local_testing_flag: - # classification = [{ - # "DocumentCategory": "Literature", - # "Title": "Bartleby, the Scrivener", - # "Summary": "The document is a narrative about an enigmatic copyist named Bartleby who works in a law office. Despite initially being a diligent employee, Bartleby begins to refuse tasks with the phrase 'I would prefer not to' and eventually stops working altogether. His passive resistance and mysterious behavior confound the narrator, who is also his employer. Bartleby's refusal to leave the office leads to various complications, and he is eventually taken to the Tombs as a vagrant. The story ends with Bartleby's death and the revelation that he may have previously worked in the Dead Letter Office, which adds a layer of poignancy to his character.", - # "d_id": "2a5c571f-bad6-4649-a4ac-36e4bb4f34cd" - # }, - # { - # "DocumentCategory": "Science", - # "Title": "The Mysterious World of Quantum Mechanics", - # "Summary": "This article delves into the fundamentals of quantum mechanics, exploring its paradoxical nature where particles can exist in multiple states simultaneously. It discusses key experiments and theories that have shaped our understanding of the quantum world, such as the double-slit experiment, Schrödinger's cat, and quantum entanglement. The piece also touches upon the implications of quantum mechanics for future technology, including quantum computing and cryptography.", - # "d_id": "f4e2c3b1-4567-8910-11a2-b3c4d5e6f7g8" - # }, - # { - # "DocumentCategory": "History", - # "Title": "The Rise and Fall of the Roman Empire", - # "Summary": "This essay provides an overview of the Roman Empire's history, from its foundation to its eventual decline. It examines the political, social, and economic factors that contributed to the empire's expansion and success, as well as those that led to its downfall. Key events and figures such as Julius Caesar, the Punic Wars, and the transition from republic to empire are discussed. The essay concludes with an analysis of the empire's lasting impact on Western civilization.", - # "d_id": "8h7g6f5e-4d3c-2b1a-09e8-d7c6b5a4f3e2" - # }, - # { - # "DocumentCategory": "Technology", - # "Title": "The Future of Artificial Intelligence", - # "Summary": "This report explores the current state and future prospects of artificial intelligence (AI). It covers the evolution of AI from simple algorithms to advanced neural networks capable of deep learning. The document discusses various applications of AI in industries such as healthcare, finance, and transportation, as well as ethical considerations and potential risks associated with AI development. 
Predictions for future advancements and their societal impact are also presented.", - # "d_id": "3c2b1a09-d8e7-f6g5-h4i3-j1k2l3m4n5o6" - # }, - # { - # "DocumentCategory": "Economics", - # "Title": "Global Economic Trends and Predictions", - # "Summary": "This analysis examines major trends in the global economy, including the rise of emerging markets, the impact of technology on job markets, and shifts in international trade. It delves into the economic effects of recent global events, such as pandemics and geopolitical conflicts, and discusses how these might shape future economic policies and practices. The document provides predictions for economic growth, inflation rates, and currency fluctuations in the coming years.", - # "d_id": "7k6j5h4g-3f2e-1d0c-b8a9-m7n6o5p4q3r2" - # } - # ] - # for classification in classification: - # - # neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, username=config.graph_database_username, - # password=config.graph_database_password) - # rs = neo4j_graph_db.create_document_node_cypher(classification, user_id) - # neo4j_graph_db.query(rs, classification) - # - # # select doc from the store - # neo4j_graph_db.update_document_node_with_namespace(user_id, vectordb_namespace=memory_name, document_id=doc_id) - # else: - try: - classification_content = await fetch_document_vectordb_namespace(session, user_id, memory_name) - except: - classification_content = "None" - # - # classification = await classify_documents(doc_name, document_id =doc_id, content=classification_content) - # - # logging.info("Classification is", str(classification)) - # neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, username=config.graph_database_username, - # password=config.graph_database_password) - # rs = neo4j_graph_db.create_document_node_cypher(classification, user_id) - # neo4j_graph_db.query(rs, classification) - # - # # select doc from the store - # neo4j_graph_db.update_document_node_with_namespace(user_id, vectordb_namespace=memory_name, - # document_id=doc_id) - await update_entity(session, DocsModel, doc_id, True) - # except: - # pass - -class ResponseString(BaseModel): - response: str = Field(..., default_factory=list) - - -# - -def generate_graph(input) -> ResponseString: - out = aclient.chat.completions.create( - model="gpt-4-1106-preview", - messages=[ - { - "role": "user", - "content": f"""Use the given context to answer query and use help of associated context: {input}. """, - - }, - { "role":"system", "content": """You are a top-tier algorithm - designed for using context summaries based on cognitive psychology to answer user queries, and provide a simple response. - Do not mention anything explicit about cognitive architecture, but use the context to answer the query."""} - ], - response_model=ResponseString, - ) - return out -async def user_context_enrichment(session, user_id:str, query:str)->str: - """ - Asynchronously enriches the user context by integrating various memory systems and document classifications. - - This function uses cognitive architecture to access and manipulate different memory systems (semantic, episodic, and procedural) associated with a user. It fetches memory details from a Neo4j graph database, classifies document categories based on the user's query, and retrieves document IDs for relevant categories. The function also dynamically manages memory attributes and methods, extending the context with document store information to enrich the user's query response. 
- - Parameters: - - session (AsyncSession): The database session for executing queries. - - user_id (str): The unique identifier of the user. - - query (str): The original query from the user. - - Returns: - - str: The final enriched context after integrating various memory systems and document classifications. - - The function performs several key operations: - 1. Retrieves semantic and episodic memory details for the user from the Neo4j graph database. - 2. Logs and classifies document categories relevant to the user's query. - 3. Fetches document IDs from Neo4j and corresponding memory names from a PostgreSQL database. - 4. Dynamically manages memory attributes and methods, including the addition of methods like 'add_memories', 'fetch_memories', and 'delete_memories' to the memory class. - 5. Extends the context with document store information relevant to the user's query. - 6. Generates and logs the final result after processing and integrating all information. - - Raises: - - Exception: Propagates any exceptions that occur during database operations or memory management. - - Example Usage: - ```python - enriched_context = await user_context_enrichment(session, "user123", "How does cognitive architecture work?") - ``` - """ - neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, username=config.graph_database_username, - password=config.graph_database_password) - - semantic_mem = neo4j_graph_db.retrieve_semantic_memory(user_id=user_id) - episodic_mem = neo4j_graph_db.retrieve_episodic_memory(user_id=user_id) - context = f""" You are a memory system that uses cognitive architecture to enrich the user context. - You have access to the following information: - EPISODIC MEMORY: {episodic_mem} - SEMANTIC MEMORY: {semantic_mem} - PROCEDURAL MEMORY: NULL - The original user query: {query} - """ - - - logging.info("Context from graphdb is %s", context) - document_categories_query = await neo4j_graph_db.get_document_categories(user_id=user_id) - result = neo4j_graph_db.query(document_categories_query) - categories = [record["category"] for record in result] - logging.info('Possible document categories are', str(categories)) - relevant_categories = await classify_call( query= query, context = context, document_types=str(categories)) - logging.info("Relevant categories after the classifier are %s", relevant_categories) - - get_doc_ids = await neo4j_graph_db.get_document_ids(user_id, relevant_categories) - - postgres_id = neo4j_graph_db.query(get_doc_ids) - logging.info("Postgres ids are %s", postgres_id) - namespace_id = await get_memory_name_by_doc_id(session, postgres_id[0]["d_id"]) - logging.info("Namespace ids are %s", namespace_id) - namespace_id = namespace_id[0] - namespace_class = namespace_id + "_class" - - memory = await Memory.create_memory(user_id, session, namespace=namespace_id, job_id="23232", - memory_label=namespace_id) - - existing_user = await Memory.check_existing_user(user_id, session) - print("here is the existing user", existing_user) - await memory.manage_memory_attributes(existing_user) - - print("Namespace id is %s", namespace_id) - await memory.add_dynamic_memory_class(namespace_id.lower(), namespace_id) - - dynamic_memory_class = getattr(memory, namespace_class.lower(), None) - - methods_to_add = ["add_memories", "fetch_memories", "delete_memories"] - - if dynamic_memory_class is not None: - for method_name in methods_to_add: - await memory.add_method_to_class(dynamic_memory_class, method_name) - print(f"Memory method {method_name} has been added") - else: - 
print(f"No attribute named in memory.") - - print("Available memory classes:", await memory.list_memory_classes()) - result = await memory.dynamic_method_call(dynamic_memory_class, 'fetch_memories', - observation=query) - context_extension = "Document store information that can help and enrich the anwer is: " + str(result) - entire_context = context + context_extension - final_result = generate_graph(entire_context) - logging.info("Final result is %s", final_result) - - return final_result - - - - -async def main(): - user_id = "user" - - async with session_scope(AsyncSessionLocal()) as session: - # out = await get_vectordb_namespace(session, user_id) - params = { - "version": "1.0", - "agreement_id": "AG123456", - "privacy_policy": "https://example.com/privacy", - "terms_of_service": "https://example.com/terms", - "format": "json", - "schema_version": "1.1", - "checksum": "a1b2c3d4e5f6", - "owner": "John Doe", - "license": "MIT", - "validity_start": "2023-08-01", - "validity_end": "2024-07-31", - } - loader_settings = { - "format": "PDF", - "source": "DEVICE", - "path": [".data"], - "strategy": "SUMMARY", - } - await load_documents_to_vectorstore(session, user_id, loader_settings=loader_settings) - await user_query_to_graph_db(session, user_id, "I walked in the forest yesterday and added to my list I need to buy some milk in the store and get a summary from a classical book i read yesterday") - await add_documents_to_graph_db(session, user_id, loader_settings=loader_settings) - await user_context_enrichment(session, user_id, query="Tell me about the book I read yesterday") - - -if __name__ == "__main__": - import asyncio - - asyncio.run(main()) - diff --git a/main.py b/main.py new file mode 100644 index 000000000..0aa2bceca --- /dev/null +++ b/main.py @@ -0,0 +1,785 @@ +from typing import Optional, List + +from neo4j.exceptions import Neo4jError +from pydantic import BaseModel, Field +from cognitive_architecture.database.graph_database.graph import Neo4jGraphDB +from cognitive_architecture.database.postgres.models.memory import MemoryModel +from cognitive_architecture.classifiers.classifier import classify_documents +import os +from dotenv import load_dotenv +from cognitive_architecture.database.postgres.database_crud import session_scope, update_entity_graph_summary +from cognitive_architecture.database.postgres.database import AsyncSessionLocal +from cognitive_architecture.utils import generate_letter_uuid +import instructor +from openai import OpenAI +from cognitive_architecture.vectorstore_manager import Memory +from cognitive_architecture.database.postgres.database_crud import fetch_job_id +import uuid +from cognitive_architecture.database.postgres.models.sessions import Session +from cognitive_architecture.database.postgres.models.operation import Operation +from cognitive_architecture.database.postgres.database_crud import session_scope, add_entity, update_entity, fetch_job_id +from cognitive_architecture.database.postgres.models.metadatas import MetaDatas +from cognitive_architecture.database.postgres.models.docs import DocsModel +from cognitive_architecture.database.postgres.models.memory import MemoryModel +from cognitive_architecture.database.postgres.models.user import User +from cognitive_architecture.classifiers.classifier import classify_call +aclient = instructor.patch(OpenAI()) +DEFAULT_PRESET = "promethai_chat" +preset_options = [DEFAULT_PRESET] +PROMETHAI_DIR = os.path.join(os.path.expanduser("~"), ".") +load_dotenv() +OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "") 
+from cognitive_architecture.config import Config
+config = Config()
+config.load()
+from sqlalchemy.orm import selectinload, joinedload, contains_eager
+import logging
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.future import select
+from cognitive_architecture.utils import get_document_names, generate_letter_uuid, get_memory_name_by_doc_id, get_unsumarized_vector_db_namespace, get_vectordb_namespace, get_vectordb_document_name
+from cognitive_architecture.shared.language_processing import translate_text, detect_language
+from cognitive_architecture.classifiers.classifier import classify_user_input
+
+async def fetch_document_vectordb_namespace(session: AsyncSession, user_id: str, namespace_id: str, doc_id: str = None):
+    logging.info("User id is %s", user_id)
+    memory = await Memory.create_memory(user_id, session, namespace=namespace_id, memory_label=namespace_id)
+
+    # Managing memory attributes
+    existing_user = await Memory.check_existing_user(user_id, session)
+    logging.info("Here is the existing user: %s", existing_user)
+    await memory.manage_memory_attributes(existing_user)
+    logging.info("Namespace id is %s", namespace_id)
+    await memory.add_dynamic_memory_class(namespace_id.lower(), namespace_id)
+    namespace_class = namespace_id + "_class"
+
+    dynamic_memory_class = getattr(memory, namespace_class.lower(), None)
+
+    methods_to_add = ["add_memories", "fetch_memories", "delete_memories"]
+
+    if dynamic_memory_class is not None:
+        for method_name in methods_to_add:
+            await memory.add_method_to_class(dynamic_memory_class, method_name)
+            logging.info("Memory method %s has been added", method_name)
+    else:
+        logging.warning("No attribute named %s in memory.", namespace_class.lower())
+
+    logging.info("Available memory classes: %s", await memory.list_memory_classes())
+    result = await memory.dynamic_method_call(dynamic_memory_class, 'fetch_memories',
+                                              observation="placeholder", search_type="summary_filter_by_object_name", params=doc_id)
+    logging.info("Result is %s", str(result))
+
+    return result, namespace_id
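+# Illustrative usage sketch (assumes an open AsyncSession and an existing
+# vector store namespace; the namespace and document id below are made up):
+#
+#     result, namespace = await fetch_document_vectordb_namespace(
+#         session, "user_test_1_1", "ABCDEF_SEMANTICMEMORY", doc_id="<doc uuid>"
+#     )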
loader_settings["single_document_path"]= loader_settings.get("path", "None")[0] +"/"+doc + logging.info("Document path is %s", loader_settings.get("single_document_path", "None")) + memory_category = loader_settings.get("memory_category", "PUBLIC") + if loader_settings is None: + memory_category = "CUSTOM" + if detect_language(doc) != "en": + doc_ = doc.strip(".pdf").replace("-", " ") + doc_ = translate_text(doc_, "sr", "en") + else: + doc_=doc + doc_id = str(uuid.uuid4()) + + logging.info("Document name is %s", doc_) + await add_entity( + session, + DocsModel( + id=doc_id, + operation_id=job_id, + graph_summary= False, + memory_category= memory_category, + doc_name=doc_ + ) + ) + # Managing memory attributes + existing_user = await Memory.check_existing_user(user_id, session) + await memory.manage_memory_attributes(existing_user) + params = { + "doc_id":doc_id + } + print("Namespace id is %s", namespace_id) + await memory.add_dynamic_memory_class(namespace_id.lower(), namespace_id) + + dynamic_memory_class = getattr(memory, namespace_class.lower(), None) + + methods_to_add = ["add_memories", "fetch_memories", "delete_memories"] + + if dynamic_memory_class is not None: + for method_name in methods_to_add: + await memory.add_method_to_class(dynamic_memory_class, method_name) + print(f"Memory method {method_name} has been added") + else: + print(f"No attribute named in memory.") + + print("Available memory classes:", await memory.list_memory_classes()) + result = await memory.dynamic_method_call(dynamic_memory_class, 'add_memories', + observation=content, params=params, loader_settings=loader_settings) + await update_entity(session, Operation, job_id, "SUCCESS") + +async def user_query_to_graph_db(session: AsyncSession, user_id: str, query_input: str): + + try: + new_user = User(id=user_id) + await add_entity(session, new_user) + except: + pass + + job_id = str(uuid.uuid4()) + + await add_entity( + session, + Operation( + id=job_id, + user_id=user_id, + operation_status="RUNNING", + operation_type="USER_QUERY_TO_GRAPH_DB", + ), + ) + + neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, username=config.graph_database_username, password=config.graph_database_password) + cypher_query = await neo4j_graph_db.generate_cypher_query_for_user_prompt_decomposition(user_id,query_input) + result = neo4j_graph_db.query(cypher_query) + neo4j_graph_db.close() + + await update_entity(session, Operation, job_id, "SUCCESS") + + return result + +# async def add_documents_to_graph_db(session: AsyncSession, user_id: Optional[str] = None, +# document_memory_types: Optional[List[str]] = None): +# """ Add documents to a graph database, handling multiple memory types """ +# if document_memory_types is None: +# document_memory_types = ['PUBLIC'] +# +# memory_type_actions = { +# 'PUBLIC': {'topic': 'PublicMemory', 'additional_action': None}, +# 'SEMANTIC': {'topic': 'SemanticMemory', 'additional_action': None} +# } +# +# try: +# memory_details, docs = await get_unsumarized_vector_db_namespace(session, user_id) +# filtered_memory_details = [detail for detail in memory_details if detail[1] in document_memory_types] +# +# neo4j_graph_db = None +# for doc in docs: +# doc_name, doc_id = doc +# try: +# classification_content = await fetch_document_vectordb_namespace( +# session, user_id, filtered_memory_details[0][0], doc_id) +# retrieval_chunks = [item['text'] for item in +# classification_content[0]['data']['Get'][filtered_memory_details[0][0]]] +# except Exception as e: +# logging.error(f"Error fetching 
+
+# async def add_documents_to_graph_db(session: AsyncSession, user_id: Optional[str] = None,
+#                                     document_memory_types: Optional[List[str]] = None):
+#     """ Add documents to a graph database, handling multiple memory types """
+#     if document_memory_types is None:
+#         document_memory_types = ['PUBLIC']
+#
+#     memory_type_actions = {
+#         'PUBLIC': {'topic': 'PublicMemory', 'additional_action': None},
+#         'SEMANTIC': {'topic': 'SemanticMemory', 'additional_action': None}
+#     }
+#
+#     try:
+#         memory_details, docs = await get_unsumarized_vector_db_namespace(session, user_id)
+#         filtered_memory_details = [detail for detail in memory_details if detail[1] in document_memory_types]
+#
+#         neo4j_graph_db = None
+#         for doc in docs:
+#             doc_name, doc_id = doc
+#             try:
+#                 classification_content = await fetch_document_vectordb_namespace(
+#                     session, user_id, filtered_memory_details[0][0], doc_id)
+#                 retrieval_chunks = [item['text'] for item in
+#                                     classification_content[0]['data']['Get'][filtered_memory_details[0][0]]]
+#             except Exception as e:
+#                 logging.error(f"Error fetching document content: {e}")
+#                 retrieval_chunks = ""
+#
+#             concatenated_retrievals = ' '.join(retrieval_chunks)
+#             classification = await classify_documents(doc_name, document_id=doc_id, content=concatenated_retrievals)
+#
+#             for memory_type in document_memory_types:
+#                 if memory_type in memory_type_actions:
+#                     if neo4j_graph_db is None:
+#                         neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url,
+#                                                       username=config.graph_database_username,
+#                                                       password=config.graph_database_password)
+#                     topic = memory_type_actions[memory_type]['topic']
+#                     ids = neo4j_graph_db.retrieve_node_id_for_memory_type(topic=topic)
+#                     for id in ids:
+#                         memory_id = id.get('memoryId')
+#                         if memory_id:
+#                             rs = neo4j_graph_db.create_document_node_cypher(classification, user_id,
+#                                                                             public_memory_id=memory_id if memory_type == 'PUBLIC' else None)
+#                             neo4j_graph_db.query(rs)
+#
+#                     if filtered_memory_details[0][1] == memory_type:
+#                         neo4j_graph_db.update_document_node_with_db_ids(
+#                             vectordb_namespace=filtered_memory_details[0][0],
+#                             document_id=doc_id, user_id=user_id if memory_type != "PUBLIC" else None)
+#     except Exception as e:
+#         logging.error(f"An error occurred: {e}")
+#         return e
+
+
+async def add_documents_to_graph_db(session: AsyncSession, user_id: str = None, document_memory_types: list = None):
+    """Classify ingested documents and link them to the matching memory nodes in the graph database."""
+    if document_memory_types is None:
+        document_memory_types = ['PUBLIC']
+
+    logging.info("Document memory types are %s", document_memory_types)
+    try:
+        # await update_document_vectordb_namespace(postgres_session, user_id)
+        memory_details, docs = await get_unsumarized_vector_db_namespace(session, user_id)
+
+        logging.info("Docs are %s", docs)
+        memory_details = [detail for detail in memory_details if detail[1] in document_memory_types]
+        logging.info("Memory details %s", memory_details)
+        for doc in docs:
+            logging.info("Memory names are %s", memory_details)
+            doc_name, doc_id = doc
+            logging.info("Doc id is %s", doc_id)
+            try:
+                classification_content = await fetch_document_vectordb_namespace(session, user_id, memory_details[0][0], doc_id)
+                retrieval_chunks = [item['text'] for item in
+                                    classification_content[0]['data']['Get'][memory_details[0][0]]]
+                logging.info("Classification content is %s", classification_content)
+            except Exception:
+                classification_content = ""
+                retrieval_chunks = ""
+            # Concatenate the extracted text values into a single classification input.
+            concatenated_retrievals = ' '.join(retrieval_chunks)
+            logging.info("Concatenated retrievals are %s", concatenated_retrievals)
+            logging.info("Retrieval chunks are %s", retrieval_chunks)
+            classification = await classify_documents(doc_name, document_id=doc_id, content=concatenated_retrievals)
+
+            logging.info("Classification is %s", str(classification))
+            neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, username=config.graph_database_username,
+                                          password=config.graph_database_password)
+            if document_memory_types == ['PUBLIC']:
+                await create_public_memory(user_id=user_id, labels=['sr'], topic="PublicMemory")
+                ids = neo4j_graph_db.retrieve_node_id_for_memory_type(topic="PublicMemory")
+                neo4j_graph_db.close()
+                logging.info("Public memory ids are %s", ids)
+            else:
+                ids = neo4j_graph_db.retrieve_node_id_for_memory_type(topic="SemanticMemory")
+                neo4j_graph_db.close()
+                logging.info("Semantic memory ids are %s", ids)
+
+            for id in ids:
+                logging.info("Memory id is %s", id.get('memoryId'))
+                neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, username=config.graph_database_username,
+                                              password=config.graph_database_password)
+                if document_memory_types == ['PUBLIC']:
+                    rs = neo4j_graph_db.create_document_node_cypher(classification, user_id, public_memory_id=id.get('memoryId'))
+                    neo4j_graph_db.close()
+                else:
+                    rs = neo4j_graph_db.create_document_node_cypher(classification, user_id, memory_type='SemanticMemory')
+                    neo4j_graph_db.close()
+                logging.info("Cypher query is %s", rs)
+                neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, username=config.graph_database_username,
+                                              password=config.graph_database_password)
+                neo4j_graph_db.query(rs)
+                neo4j_graph_db.close()
+                logging.info("Document node created; updating it with the vector database ids")
+                neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, username=config.graph_database_username,
+                                              password=config.graph_database_password)
+                if memory_details[0][1] == "PUBLIC":
+                    neo4j_graph_db.update_document_node_with_db_ids(vectordb_namespace=memory_details[0][0],
+                                                                    document_id=doc_id)
+                    neo4j_graph_db.close()
+                else:
+                    neo4j_graph_db.update_document_node_with_db_ids(vectordb_namespace=memory_details[0][0],
+                                                                    document_id=doc_id, user_id=user_id)
+                    neo4j_graph_db.close()
+        # await update_entity_graph_summary(session, DocsModel, doc_id, True)
+    except Exception as e:
+        logging.error(f"An error occurred while adding documents to the graph database: {e}")
+        return e
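+# Note: `classify_documents` is expected to return a dict shaped like
+# {"DocumentCategory": ..., "Title": ..., "Summary": ..., "d_id": ...}; these are
+# the keys `create_document_node_cypher` validated in the removed
+# level_4 implementation above, and the classification examples retained there
+# in comments follow the same shape.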
+
+class ResponseString(BaseModel):
+    response: Optional[str] = Field(default=None)  # defaults to None; a default string such as "" would also work
+    quotation: Optional[str] = Field(default=None)  # same here
+
+
+def generate_graph(input) -> ResponseString:
+    out = aclient.chat.completions.create(
+        model="gpt-4-1106-preview",
+        messages=[
+            {
+                "role": "user",
+                "content": f"""Use the given context to answer the query and use help of associated context: {input}. """,
+            },
+            {"role": "system", "content": """You are a top-tier algorithm
+            designed for using context summaries based on cognitive psychology to answer user queries, and provide a simple response.
+            Do not mention anything explicit about cognitive architecture, but use the context to answer the query. If you are using a document, reference the document metadata field"""}
+        ],
+        response_model=ResponseString,
+    )
+    return out
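+# Illustrative usage sketch: `generate_graph` returns a validated
+# `ResponseString`, so both fields can be read directly:
+#
+#     answer = generate_graph("SEARCH CONTEXT: ... The original user query: ...")
+#     print(answer.response, answer.quotation)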
+async def user_context_enrichment(session, user_id: str, query: str, generative_response: bool = False, memory_type: str = None) -> str:
+    """
+    Asynchronously enriches the user context by integrating various memory systems and document classifications.
+
+    This function uses cognitive architecture to access and manipulate different memory systems (semantic, episodic, and procedural) associated with a user.
+    It fetches memory details from a Neo4j graph database, classifies document categories based on the user's query, and retrieves document IDs for relevant categories.
+    The function also dynamically manages memory attributes and methods, extending the context with document store information to enrich the user's query response.
+
+    Parameters:
+    - session (AsyncSession): The database session for executing queries.
+    - user_id (str): The unique identifier of the user.
+    - query (str): The original query from the user.
+    - generative_response (bool): If True, return a generated answer instead of the raw context.
+    - memory_type (str): The memory node to search, e.g. 'PublicMemory'.
+
+    Returns:
+    - str: The final enriched context after integrating various memory systems and document classifications.
+
+    The function performs several key operations:
+    1. Retrieves semantic and episodic memory details for the user from the Neo4j graph database.
+    2. Logs and classifies document categories relevant to the user's query.
+    3. Fetches document IDs from Neo4j and corresponding memory names from a PostgreSQL database.
+    4. Dynamically manages memory attributes and methods, including the addition of methods like 'add_memories', 'fetch_memories', and 'delete_memories' to the memory class.
+    5. Extends the context with document store information relevant to the user's query.
+    6. Generates and logs the final result after processing and integrating all information.
+
+    Raises:
+    - Exception: Propagates any exceptions that occur during database operations or memory management.
+
+    Example Usage:
+    ```python
+    enriched_context = await user_context_enrichment(session, "user123", "How does cognitive architecture work?")
+    ```
+    """
+    neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, username=config.graph_database_username,
+                                  password=config.graph_database_password)
+
+    # await user_query_to_graph_db(session, user_id, query)
+
+    semantic_mem = neo4j_graph_db.retrieve_semantic_memory(user_id=user_id)
+    neo4j_graph_db.close()
+    neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, username=config.graph_database_username,
+                                  password=config.graph_database_password)
+    episodic_mem = neo4j_graph_db.retrieve_episodic_memory(user_id=user_id)
+    neo4j_graph_db.close()
+    # public_mem = neo4j_graph_db.retrieve_public_memory(user_id=user_id)
+
+    if detect_language(query) != "en":
+        query = translate_text(query, "sr", "en")
+        logging.info("Translated query is %s", str(query))
+
+    if memory_type == 'PublicMemory':
+        neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, username=config.graph_database_username,
+                                      password=config.graph_database_password)
+        summaries = await neo4j_graph_db.get_memory_linked_document_summaries(user_id=user_id, memory_type=memory_type)
+        neo4j_graph_db.close()
+        logging.info("Summaries are %s", summaries)
+
+        max_attempts = 3
+        relevant_summary_id = None
+
+        for _ in range(max_attempts):
+            relevant_summary_id = await classify_call(query=query, document_summaries=str(summaries))
+            logging.info("Relevant summary id is %s", relevant_summary_id)
+            if relevant_summary_id is not None:
+                break
+
+        neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, username=config.graph_database_username,
+                                      password=config.graph_database_password)
+        postgres_id = await neo4j_graph_db.get_memory_linked_document_ids(user_id, summary_id=relevant_summary_id, memory_type=memory_type)
+        neo4j_graph_db.close()
+        logging.info("Postgres ids are %s", postgres_id)
+        namespace_id = await get_memory_name_by_doc_id(session, postgres_id[0])
+        logging.info("Namespace ids are %s", namespace_id)
+        params = {"doc_id": postgres_id[0]}
+        namespace_id = namespace_id[0]
+        namespace_class = namespace_id + "_class"
+        if memory_type == 'PublicMemory':
+            user_id = 'system_user'
+
+        memory = await Memory.create_memory(user_id, session, namespace=namespace_id, job_id="23232",
+                                            memory_label=namespace_id)
+
+        existing_user = await Memory.check_existing_user(user_id, session)
+        logging.info("Here is the existing user: %s", existing_user)
+        await memory.manage_memory_attributes(existing_user)
+
+        logging.info("Namespace id is %s", namespace_id)
+        await memory.add_dynamic_memory_class(namespace_id.lower(), namespace_id)
+
+        dynamic_memory_class = getattr(memory, namespace_class.lower(), None)
+
+        methods_to_add = ["add_memories", "fetch_memories", "delete_memories"]
+
+        if dynamic_memory_class is not None:
+            for method_name in methods_to_add:
+                await memory.add_method_to_class(dynamic_memory_class, method_name)
+                logging.info("Memory method %s has been added", method_name)
+        else:
+            logging.warning("No attribute named %s in memory.", namespace_class.lower())
+
+        logging.info("Available memory classes: %s", await memory.list_memory_classes())
+        results = await memory.dynamic_method_call(dynamic_memory_class, 'fetch_memories',
+                                                   observation=query, params=postgres_id[0], search_type="summary_filter_by_object_name")
+        logging.info("Result is %s", str(results))
+
+        search_context = ""
+        for result in results['data']['Get'][namespace_id]:
+            # Each result is a dictionary with keys like 'source' and 'text'.
+            source = result['source'].replace('-', ' ').replace('.pdf', '').replace('.data/', '')
+            text = result['text']
+            search_context += f"Document source: {source}, Document text: {text} \n"
+    else:
+        # Without a searchable memory type there are no documents to add.
+        search_context = "No relevant documents found"
+
+    context = f""" You are a memory system that uses cognitive architecture to enrich the
+    LLM context and provide better query response.
+    You have access to the following information:
+    EPISODIC MEMORY: {episodic_mem[:200]}
+    SEMANTIC MEMORY: {semantic_mem[:200]}
+    PROCEDURAL MEMORY: NULL
+    SEARCH CONTEXT: The following documents provided with sources they were
+    extracted from could be used to provide an answer {search_context}
+    The original user query: {query}
+    """
+    if generative_response is not True:
+        return context
+    else:
+        generative_result = generate_graph(context)
+        logging.info("Generative result is %s", generative_result.model_dump_json())
+        return generative_result.model_dump_json()
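+# Illustrative usage sketch, mirroring the docstring example but with the newer
+# flags exercised:
+#
+#     enriched = await user_context_enrichment(
+#         session, "user123", "How does cognitive architecture work?",
+#         generative_response=True, memory_type="PublicMemory",
+#     )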
+async def attach_user_to_memory(user_id: str = None, labels: list = None, topic: str = None) -> Optional[int]:
+    """
+    Link a user to a public memory node.
+
+    Args:
+        user_id (str): The unique identifier for the user.
+        labels (list): Labels for the memory node, e.g. to filter for different countries.
+        topic (str): Name of the memory to link to.
+
+    Returns:
+        Optional[int]: 1 if the user was linked, or None if an error occurs.
+    """
+    # Validate input parameters
+    if not user_id:
+        raise ValueError("User ID is required.")
+    if not labels:
+        labels = ['sr']  # Default labels for the memory node
+    if not topic:
+        topic = "PublicMemory"
+
+    try:
+        neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url,
+                                      username=config.graph_database_username,
+                                      password=config.graph_database_password)
+        # The topic for public memory is predefined, e.g. "PublicMemory"
+        ids = neo4j_graph_db.retrieve_node_id_for_memory_type(topic=topic)
+        neo4j_graph_db.close()
+
+        for memory_node in ids:
+            neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url,
+                                          username=config.graph_database_username,
+                                          password=config.graph_database_password)
+            neo4j_graph_db.link_public_memory_to_user(memory_id=memory_node.get('memoryId'), user_id=user_id)
+            neo4j_graph_db.close()
+        return 1
+    except Neo4jError as e:
+        logging.error(f"Error linking user to public memory node: {e}")
+        return None
+
+
+async def unlink_user_from_memory(user_id: str = None, labels: list = None, topic: str = None) -> Optional[int]:
+    """
+    Unlink a user from a public memory node.
+
+    Args:
+        user_id (str): The unique identifier for the user.
+        labels (list): Labels for the memory node, e.g. to filter for different countries.
+        topic (str): Name of the memory to unlink from.
+
+    Returns:
+        Optional[int]: 1 if the user was unlinked, or None if an error occurs.
+    """
+    # Validate input parameters
+    if not user_id:
+        raise ValueError("User ID is required.")
+    if not labels:
+        labels = ['sr']  # Default labels for the memory node
+    if not topic:
+        topic = "PublicMemory"
+
+    try:
+        neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url,
+                                      username=config.graph_database_username,
+                                      password=config.graph_database_password)
+        # The topic for public memory is predefined, e.g. "PublicMemory"
+        ids = neo4j_graph_db.retrieve_node_id_for_memory_type(topic=topic)
+        neo4j_graph_db.close()
+
+        for memory_node in ids:
+            neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url,
+                                          username=config.graph_database_username,
+                                          password=config.graph_database_password)
+            neo4j_graph_db.unlink_memory_from_user(memory_id=memory_node.get('memoryId'), user_id=user_id)
+            neo4j_graph_db.close()
+        return 1
+    except Neo4jError as e:
+        logging.error(f"Error unlinking user from memory node: {e}")
+        return None
+
+
+async def relevance_feedback(query: str, input_type: str):
+    max_attempts = 6
+    result = None
+    for _ in range(max_attempts):
+        result = await classify_user_input(query, input_type=input_type)
+        if isinstance(result, bool):
+            break  # Exit the loop once a boolean result is obtained
+    return result
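+
+# End-to-end lifecycle sketch (editor's example, not part of the original change),
+# mirroring the commented-out calls in main() below; it assumes an open AsyncSession
+# and documents already loaded into the vector store:
+#
+#     await attach_user_to_memory(user_id='test_user', labels=['sr'], topic="PublicMemory")
+#     context = await user_context_enrichment(session=session, user_id='test_user',
+#                                             query="Koja je minimalna širina vrata za osobe sa invaliditetom?",
+#                                             memory_type="PublicMemory")
+#     await unlink_user_from_memory(user_id='test_user', topic="PublicMemory")
+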
+async def main():
+    user_id = "user_test_1_1"
+
+    async with session_scope(AsyncSessionLocal()) as session:
+        # await update_entity(session, DocsModel, "8cd9a022-5a7a-4af5-815a-f988415536ae", True)
+        # output = await get_unsumarized_vector_db_namespace(session, user_id)
+
+        class GraphQLQuery(BaseModel):
+            query: str
+
+        gg = await user_query_to_graph_db(session, user_id, "How does cognitive architecture work?")
+        print(gg)
+
+        # def cypher_statement_correcting(input: str) -> str:
+        #     out = aclient.chat.completions.create(
+        #         model=config.model,
+        #         temperature=0,
+        #         max_tokens=2000,
+        #         messages=[
+        #             {
+        #                 "role": "user",
+        #                 "content": f"""Check the cypher query for syntax issues, and fix any if found and
+        #                     return it as is: {input}. """,
+        #             },
+        #             {"role": "system", "content": """You are a top-tier algorithm
+        #                 designed for checking cypher queries for neo4j graph databases. You have to return input provided to you as is."""}
+        #         ],
+        #         response_model=GraphQLQuery,
+        #     )
+        #     return out
+        #
+        # query = """WITH person1_4f21b68c73e24d0497e1010eb747b892, location2_dc0c68a9651142d38b6e117bfdc5c227, object3_4c7ba47babd24be1b35c30c42c87a3e9, product4_c984d5f9695f48ee9a43f58f57cc6740, location5_5e43f4c45b3c44ea897c12220db4c051, object6_5cdb87ad488c450c9dbce07b7daf3d8d, information7_f756e3f3720c4fe5aeb01287badaf088, event8_da6334e744454264900296319e14b532, action9_48e45419604e4d66b3e718ee1d6c095f, action10_f48acb1db4da4934afbe17363e9e63a4, user, semantic, episodic, buffer
+        # CREATE (person1_4f21b68c73e24d0497e1010eb747b892)-[:EXPERIENCED]->(event8_da6334e744454264900296319e14b532)
+        # CREATE (person1_4f21b68c73e24d0497e1010eb747b892)-[:HAS]->(object3_4c7ba47babd24be1b35c30c42c87a3e9)
+        # CREATE (object3_4c7ba47babd24be1b35c30c42c87a3e9)-[:INCLUDES]->(product4_c984d5f9695f48ee9a43f58f57cc6740)
+        # CREATE (product4_c984d5f9695f48ee9a43f58f57cc6740)-[:TO_BE_PURCHASED_AT]->(location5_5e43f4c45b3c44ea897c12220db4c051)
+        # CREATE (person1_4f21b68c73e24d0497e1010eb747b892)-[:INTENDS_TO_PERFORM]->(action9_48e45419604e4d66b3e718ee1d6c095f)
+        # CREATE (object6_5cdb87ad488c450c9dbce07b7daf3d8d)-[:A_CLASSICAL_BOOK_TO_BE_SUMMARIZED]->(information7_f756e3f3720c4fe5aeb01287badaf088)
+        # CREATE (person1_4f21b68c73e24d0497e1010eb747b892)-[:NEEDS_TO_COMPLETE]->(action10_f48acb1db4da4934afbe17363e9e63a4)
+        # WITH person1_4f21b68c73e24d0497e1010eb747b892, location2_dc0c68a9651142d38b6e117bfdc5c227, object3_4c7ba47babd24be1b35c30c42c87a3e9, product4_c984d5f9695f48ee9a43f58f57cc6740, location5_5e43f4c45b3c44ea897c12220db4c051, object6_5cdb87ad488c450c9dbce07b7daf3d8d, information7_f756e3f3720c4fe5aeb01287badaf088, event8_da6334e744454264900296319e14b532, action9_48e45419604e4d66b3e718ee1d6c095f, action10_f48acb1db4da4934afbe17363e9e63a4, user, semantic, episodic, buffer
+        # CREATE (episodic)-[:HAS_EVENT]->(person1_4f21b68c73e24d0497e1010eb747b892)
+        # CREATE (buffer)-[:CURRENTLY_HOLDING]->(person1_4f21b68c73e24d0497e1010eb747b892)
+        # CREATE (episodic)-[:HAS_EVENT]->(location2_dc0c68a9651142d38b6e117bfdc5c227)
+        # CREATE (buffer)-[:CURRENTLY_HOLDING]->(location2_dc0c68a9651142d38b6e117bfdc5c227)
+        # CREATE (episodic)-[:HAS_EVENT]->(object3_4c7ba47babd24be1b35c30c42c87a3e9)
+        # CREATE (buffer)-[:CURRENTLY_HOLDING]->(object3_4c7ba47babd24be1b35c30c42c87a3e9)
+        # CREATE (episodic)-[:HAS_EVENT]->(product4_c984d5f9695f48ee9a43f58f57cc6740)
+        # CREATE (buffer)-[:CURRENTLY_HOLDING]->(product4_c984d5f9695f48ee9a43f58f57cc6740)
+        # CREATE (episodic)-[:HAS_EVENT]->(location5_5e43f4c45b3c44ea897c12220db4c051)
+        # CREATE (buffer)-[:CURRENTLY_HOLDING]->(location5_5e43f4c45b3c44ea897c12220db4c051)
+        # CREATE (episodic)-[:HAS_EVENT]->(object6_5cdb87ad488c450c9dbce07b7daf3d8d)
+        # CREATE (buffer)-[:CURRENTLY_HOLDING]->(object6_5cdb87ad488c450c9dbce07b7daf3d8d)
+        # CREATE (episodic)-[:HAS_EVENT]->(information7_f756e3f3720c4fe5aeb01287badaf088)
+        # CREATE (buffer)-[:CURRENTLY_HOLDING]->(information7_f756e3f3720c4fe5aeb01287badaf088)
+        # CREATE (episodic)-[:HAS_EVENT]->(event8_da6334e744454264900296319e14b532)
+        # CREATE (buffer)-[:CURRENTLY_HOLDING]->(event8_da6334e744454264900296319e14b532)
+        # CREATE (episodic)-[:HAS_EVENT]->(action9_48e45419604e4d66b3e718ee1d6c095f)
+        # CREATE (buffer)-[:CURRENTLY_HOLDING]->(action9_48e45419604e4d66b3e718ee1d6c095f)
+        # CREATE (episodic)-[:HAS_EVENT]->(action10_f48acb1db4da4934afbe17363e9e63a4)
+        # CREATE (buffer)-[:CURRENTLY_HOLDING]->(action10_f48acb1db4da4934afbe17363e9e63a4)"""
+        #
+        # out = cypher_statement_correcting(query)
+        # print(out)
+        #
+        # out = await user_query_to_graph_db(session, user_id, "I walked in the forest yesterday and added to my list I need to buy some milk in the store and get a summary from a classical book i read yesterday")
+        # print(out)
+        # load_doc_to_graph = await add_documents_to_graph_db(session, user_id)
+        # print(load_doc_to_graph)
+        user_id = 'test_user'
+        # loader_settings = {
+        #     "format": "PDF",
+        #     "source": "DEVICE",
+        #     "path": [".data"]
+        # }
+        # await load_documents_to_vectorstore(session, user_id, loader_settings=loader_settings)
+        # await create_public_memory(user_id=user_id, labels=['sr'], topic="PublicMemory")
+        # await add_documents_to_graph_db(session, user_id)
+        #
+        # neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, username=config.graph_database_username,
+        #                               password=config.graph_database_password)
+
+        # await attach_user_to_memory(user_id=user_id, labels=['sr'], topic="PublicMemory")
+
+        # return_ = await user_context_enrichment(user_id=user_id, query="Koja je minimalna širina vrata za osobe sa invaliditetom?", session=session, memory_type="PublicMemory", generative_response=True)
+        # print(return_)
+        # aa = await relevance_feedback("I need to understand how to build a staircase in an apartment building", "PublicMemory")
+        # print(aa)
+
+        # document_summary = {
+        #     'DocumentCategory': 'Science',
+        #     'Title': 'The Future of AI',
+        #     'Summary': 'An insightful article about the advancements in AI.',
+        #     'd_id': 'doc123'
+        # }
+        #
+        # # Example user ID
+        # user_id = 'user'
+        #
+        # # value = await neo4j_graph_db.create_memory_node(labels=['sr'])
+        # # print(value)
+        # # neo4j_graph_db.close()
+        #
+        # await add_documents_to_graph_db(session, user_id)
+        # neo4j_graph_db.link_public_memory_to_user(memory_id=17, user_id=user_id)
+        #
+        # ids = neo4j_graph_db.retrieve_node_id_for_memory_type(topic="Document")
+        # print(ids)
+        #
+        # for id in ids:
+        #     print(id.get('memoryId'))
+        #     neo4j_graph_db.delete_memory_node(memory_id=id.get('memoryId'), topic="Document")
+        #
+        # neo4j_graph_db.delete_memory_node(memory_id=16, topic="PublicSerbianArchitecture")
+        # neo4j_graph_db.unlink_memory_from_user(memory_id=17, user_id=user_id)
+        # cypher_query_public = neo4j_graph_db.create_document_node_cypher(document_summary, user_id, memory_type="PUBLIC")
+        # neo4j_graph_db.query(cypher_query_public)
+        # link_memory_to_user(user_id, session)
+
+        # neo4j_graph_db.create_memory_node(labels=['sr'])
+        # out = await get_vectordb_namespace(session, user_id)
+        # params = {
+        #     "version": "1.0",
+        #     "agreement_id": "AG123456",
+        #     "privacy_policy": "https://example.com/privacy",
+        #     "terms_of_service": "https://example.com/terms",
+        #     "format": "json",
+        #     "schema_version": "1.1",
+        #     "checksum": "a1b2c3d4e5f6",
+        #     "owner": "John Doe",
+        #     "license": "MIT",
+        #     "validity_start": "2023-08-01",
+        #     "validity_end": "2024-07-31",
+        # }
+        # loader_settings = {
+        #     "format": "PDF",
+        #     "source": "DEVICE",
+        #     "path": [".data"],
+        #     "strategy": "SUMMARY",
+        # }
+        # await load_documents_to_vectorstore(session, user_id, loader_settings=loader_settings)
+        # await user_query_to_graph_db(session, user_id, "I walked in the forest yesterday and added to my list I need to buy some milk in the store and get a summary from a classical book i read yesterday")
+        # await add_documents_to_graph_db(session, user_id, loader_settings=loader_settings)
+        # await user_context_enrichment(session, user_id, query="Tell me about the book I read yesterday")
+
+
+if __name__ == "__main__":
+    import asyncio
+
+    asyncio.run(main())
diff --git a/level_4/poetry.lock b/poetry.lock similarity index 77% rename from level_4/poetry.lock rename to poetry.lock index ab48d70bd..be9d0dc48 100644 --- a/level_4/poetry.lock +++ b/poetry.lock @@ -1,16 +1,5 @@ # This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. -[[package]] -name = "absl-py" -version = "2.0.0" -description = "Abseil Python Common Libraries, see https://github.com/abseil/abseil-py." -optional = false -python-versions = ">=3.7" -files = [ - {file = "absl-py-2.0.0.tar.gz", hash = "sha256:d9690211c5fcfefcdd1a45470ac2b5c5acd45241c3af71eed96bc5441746c0d5"}, - {file = "absl_py-2.0.0-py3-none-any.whl", hash = "sha256:9a28abb62774ae4e8edbe2dd4c49ffcd45a6a848952a5eccc6a49f3f0fc1e2f3"}, -] - [[package]] name = "aiohttp" version = "3.9.1" @@ -298,17 +287,17 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte [[package]] name = "authlib" -version = "1.2.1" +version = "1.3.0" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "Authlib-1.2.1-py2.py3-none-any.whl", hash = "sha256:c88984ea00149a90e3537c964327da930779afa4564e354edfd98410bea01911"}, - {file = "Authlib-1.2.1.tar.gz", hash = "sha256:421f7c6b468d907ca2d9afede256f068f87e34d23dd221c07d13d4c234726afb"}, + {file = "Authlib-1.3.0-py2.py3-none-any.whl", hash = "sha256:9637e4de1fb498310a56900b3e2043a206b03cb11c05422014b0302cbc814be3"}, + {file = "Authlib-1.3.0.tar.gz", hash = "sha256:959ea62a5b7b5123c5059758296122b57cd2585ae2ed1c0622c21b371ffdae06"}, ] [package.dependencies] -cryptography = ">=3.2" +cryptography = "*" [[package]] name = "backoff" @@ -398,32 +387,32 @@ numpy = {version = ">=1.19.0", markers = "python_version >= \"3.9\""} [[package]] name = "boto3" -version = "1.34.1" +version = "1.34.9" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.8" files = [ - {file = "boto3-1.34.1-py3-none-any.whl", hash = "sha256:aae7dbff19cb36dfd0ca391684c652a496156ca1d9e696fcbdcffde2b9e7c9bb"}, - {file = "boto3-1.34.1.tar.gz", hash = "sha256:a08123b49430cd66850def7aa9ca6c14f8a99a95cdbcddabe805484001a5a6c3"}, + {file = "boto3-1.34.9-py3-none-any.whl", hash = "sha256:8e48343d52389041af053992decf651bc4fc7b2d65eca12acdcff62d446ecdf4"}, + {file = "boto3-1.34.9.tar.gz", hash = "sha256:18c386a55e461749e6c9c8a10627a230db18a20dd72f2950ce19546974f15cd5"}, ] [package.dependencies] -botocore = ">=1.34.1,<1.35.0" +botocore = ">=1.34.9,<1.35.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.9.0,<0.10.0" +s3transfer = ">=0.10.0,<0.11.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.1" +version = "1.34.9" description = "Low-level, data-driven core of boto 3."
optional = false python-versions = ">= 3.8" files = [ - {file = "botocore-1.34.1-py3-none-any.whl", hash = "sha256:99bd6e9273eba8e81fbcf9881fc0f390f1e7a563bf76faa5fab16ad9bf69de3f"}, - {file = "botocore-1.34.1.tar.gz", hash = "sha256:0688d095305ba8638745dcdf85daa8aa41c36b27912c41e93447c1ef401b6ca1"}, + {file = "botocore-1.34.9-py3-none-any.whl", hash = "sha256:b40f027f371a1bd211ef67a3727c74bc3713af5fc5d830d4587abda296ebb19e"}, + {file = "botocore-1.34.9.tar.gz", hash = "sha256:2cf43fa5b5438a95fc466c700f3098228b45df38e311103488554b2334b42ee3"}, ] [package.dependencies] @@ -432,7 +421,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""} [package.extras] -crt = ["awscrt (==0.19.17)"] +crt = ["awscrt (==0.19.19)"] [[package]] name = "cachetools" @@ -965,85 +954,6 @@ files = [ marshmallow = ">=3.18.0,<4.0.0" typing-inspect = ">=0.4.0,<1" -[[package]] -name = "datasets" -version = "2.15.0" -description = "HuggingFace community-driven open-source library of datasets" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "datasets-2.15.0-py3-none-any.whl", hash = "sha256:6d658d23811393dfc982d026082e1650bdaaae28f6a86e651966cb072229a228"}, - {file = "datasets-2.15.0.tar.gz", hash = "sha256:a26d059370bd7503bd60e9337977199a13117a83f72fb61eda7e66f0c4d50b2b"}, -] - -[package.dependencies] -aiohttp = "*" -dill = ">=0.3.0,<0.3.8" -fsspec = {version = ">=2023.1.0,<=2023.10.0", extras = ["http"]} -huggingface-hub = ">=0.18.0" -multiprocess = "*" -numpy = ">=1.17" -packaging = "*" -pandas = "*" -pyarrow = ">=8.0.0" -pyarrow-hotfix = "*" -pyyaml = ">=5.1" -requests = ">=2.19.0" -tqdm = ">=4.62.1" -xxhash = "*" - -[package.extras] -apache-beam = ["apache-beam (>=2.26.0,<2.44.0)"] -audio = ["librosa", "soundfile (>=0.12.1)"] -benchmarks = ["tensorflow (==2.12.0)", "torch (==2.0.1)", "transformers (==4.30.1)"] -dev = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0,<2.44.0)", "black (>=23.1,<24.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "pyyaml (>=5.3.1)", "rarfile (>=4.0)", "ruff (>=0.0.241)", "s3fs", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy (<2.0.0)", "tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch", "transformers", "typing-extensions (>=4.6.1)", "zstandard"] -docs = ["s3fs", "tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos", "torch", "transformers"] -jax = ["jax (>=0.3.14)", "jaxlib (>=0.3.14)"] -metrics-tests = ["Werkzeug (>=1.0.1)", "accelerate", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "requests-file (>=1.5.1)", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "spacy (>=3.0.0)", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "typer (<0.5.0)"] -quality = ["black (>=23.1,<24.0)", "pyyaml (>=5.3.1)", "ruff (>=0.0.241)"] -s3 = ["s3fs"] -tensorflow = ["tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos"] -tensorflow-gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0,<2.44.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", 
"pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy (<2.0.0)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch", "transformers", "typing-extensions (>=4.6.1)", "zstandard"] -torch = ["torch"] -vision = ["Pillow (>=6.2.1)"] - -[[package]] -name = "deepeval" -version = "0.20.37" -description = "The open-source evaluation framework for LLMs." -optional = false -python-versions = "*" -files = [ - {file = "deepeval-0.20.37-py3-none-any.whl", hash = "sha256:9bf243b7b89a4033b417c2c256b7c3e576a7b872fda8a75dc721848850143d4c"}, - {file = "deepeval-0.20.37.tar.gz", hash = "sha256:e2ea8d0b470b9d883b57b84ba807bff7a0f82f742db1a32d0f3c397909f9addb"}, -] - -[package.dependencies] -detoxify = "*" -langchain = "*" -nltk = "3.8.1" -pandas = "*" -portalocker = "*" -protobuf = "3.20.3" -pydantic = "*" -pytest = "*" -pytest-xdist = "*" -ragas = "*" -requests = "*" -rich = "*" -rouge-score = "0.1.2" -sentence-transformers = "*" -sentry-sdk = "*" -tabulate = "*" -tqdm = "*" -transformers = "*" -typer = "0.9.0" - -[package.extras] -bias = ["Dbias", "tensorflow"] -dev = ["black"] - [[package]] name = "deprecated" version = "1.2.14" @@ -1061,45 +971,15 @@ wrapt = ">=1.10,<2" [package.extras] dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] -[[package]] -name = "detoxify" -version = "0.5.0" -description = "A python library for detecting toxic comments" -optional = false -python-versions = ">=3.6" -files = [ - {file = "detoxify-0.5.0-py3-none-any.whl", hash = "sha256:fd65e083aceab0762b1f4ec0428ac94b593ebfa1be31d03a84aafdb0155a83d3"}, - {file = "detoxify-0.5.0.tar.gz", hash = "sha256:91044238f9b829e9c3b348e5d9f44f72ca29f66718e8d7134b4973b5dfdaae68"}, -] - -[package.dependencies] -sentencepiece = ">=0.1.94" -torch = ">=1.7.0" -transformers = "!=4.18.0" - -[[package]] -name = "dill" -version = "0.3.7" -description = "serialize all of Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, - {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] - [[package]] name = "distro" -version = "1.8.0" +version = "1.9.0" description = "Distro - an OS platform information API" optional = false python-versions = ">=3.6" files = [ - {file = "distro-1.8.0-py3-none-any.whl", hash = "sha256:99522ca3e365cac527b44bde033f64c6945d90eb9f769703caaec52b09bbd3ff"}, - {file = "distro-1.8.0.tar.gz", hash = "sha256:02e111d1dc6a50abb8eed6bf31c3e48ed8b0830d1ea2a1b78c61765c2513fdd8"}, + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, ] [[package]] @@ -1304,20 +1184,6 @@ files = [ [package.extras] test = ["pytest (>=6)"] -[[package]] -name = "execnet" -version = "2.0.2" -description = "execnet: rapid multi-Python deployment" -optional = false -python-versions = ">=3.7" -files = [ - {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"}, - {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"}, -] - -[package.extras] -testing = ["hatch", "pre-commit", "pytest", "tox"] - [[package]] name = 
"fastapi" version = "0.104.1" @@ -1414,59 +1280,59 @@ files = [ [[package]] name = "fonttools" -version = "4.46.0" +version = "4.47.0" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.46.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d4e69e2c7f93b695d2e6f18f709d501d945f65c1d237dafaabdd23cd935a5276"}, - {file = "fonttools-4.46.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:25852f0c63df0af022f698464a4a80f7d1d5bd974bcd22f995f6b4ad198e32dd"}, - {file = "fonttools-4.46.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:adab73618d0a328b203a0e242b3eba60a2b5662d9cb2bd16ed9c52af8a7d86af"}, - {file = "fonttools-4.46.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf923a4a556ab4cc4c52f69a4a2db624cf5a2cf360394368b40c5152fe3321e"}, - {file = "fonttools-4.46.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:87c214197712cc14fd2a4621efce2a9c501a77041232b789568149a8a3161517"}, - {file = "fonttools-4.46.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:156ae342a1ed1fe38e180de471e98fbf5b2b6ae280fa3323138569c4ca215844"}, - {file = "fonttools-4.46.0-cp310-cp310-win32.whl", hash = "sha256:c506e3d3a9e898caee4dc094f34b49c5566870d5a2d1ca2125f0a9f35ecc2205"}, - {file = "fonttools-4.46.0-cp310-cp310-win_amd64.whl", hash = "sha256:f8bc3973ed58893c4107993e0a7ae34901cb572b5e798249cbef35d30801ffd4"}, - {file = "fonttools-4.46.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:982f69855ac258260f51048d9e0c53c5f19881138cc7ca06deb38dc4b97404b6"}, - {file = "fonttools-4.46.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c23c59d321d62588620f2255cf951270bf637d88070f38ed8b5e5558775b86c"}, - {file = "fonttools-4.46.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0e94244ec24a940ecfbe5b31c975c8a575d5ed2d80f9a280ce3b21fa5dc9c34"}, - {file = "fonttools-4.46.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a9f9cdd7ef63d1b8ac90db335762451452426b3207abd79f60da510cea62da5"}, - {file = "fonttools-4.46.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ca9eceebe70035b057ce549e2054cad73e95cac3fe91a9d827253d1c14618204"}, - {file = "fonttools-4.46.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8be6adfa4e15977075278dd0a0bae74dec59be7b969b5ceed93fb86af52aa5be"}, - {file = "fonttools-4.46.0-cp311-cp311-win32.whl", hash = "sha256:7b5636f5706d49f13b6d610fe54ee662336cdf56b5a6f6683c0b803e23d826d2"}, - {file = "fonttools-4.46.0-cp311-cp311-win_amd64.whl", hash = "sha256:49ea0983e55fd7586a809787cd4644a7ae471e53ab8ddc016f9093b400e32646"}, - {file = "fonttools-4.46.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7b460720ce81773da1a3e7cc964c48e1e11942b280619582a897fa0117b56a62"}, - {file = "fonttools-4.46.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8bee9f4fc8c99824a424ae45c789ee8c67cb84f8e747afa7f83b7d3cef439c3b"}, - {file = "fonttools-4.46.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3d7b96aba96e05e8c911ce2dfc5acc6a178b8f44f6aa69371ab91aa587563da"}, - {file = "fonttools-4.46.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e6aeb5c340416d11a3209d75c48d13e72deea9e1517837dd1522c1fd1f17c11"}, - {file = "fonttools-4.46.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c779f8701deedf41908f287aeb775b8a6f59875ad1002b98ac6034ae4ddc1b7b"}, - {file = 
"fonttools-4.46.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce199227ce7921eaafdd4f96536f16b232d6b580ce74ce337de544bf06cb2752"}, - {file = "fonttools-4.46.0-cp312-cp312-win32.whl", hash = "sha256:1c9937c4dd1061afd22643389445fabda858af5e805860ec3082a4bc07c7a720"}, - {file = "fonttools-4.46.0-cp312-cp312-win_amd64.whl", hash = "sha256:a9fa52ef8fd14d7eb3d813e1451e7ace3e1eebfa9b7237d3f81fee8f3de6a114"}, - {file = "fonttools-4.46.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c94564b1f3b5dd87e73577610d85115b1936edcc596deaf84a31bbe70e17456b"}, - {file = "fonttools-4.46.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4a50a1dfad7f7ba5ca3f99cc73bf5cdac67ceade8e4b355a877521f20ad1b63"}, - {file = "fonttools-4.46.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89c2c520f9492844ecd6316d20c6c7a157b5c0cb73a1411b3db28ee304f30122"}, - {file = "fonttools-4.46.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5b7905fd68eacb7cc56a13139da5c312c45baae6950dd00b02563c54508a041"}, - {file = "fonttools-4.46.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8485cc468288e213f31afdaf1fdda3c79010f542559fbba936a54f4644df2570"}, - {file = "fonttools-4.46.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:87c3299da7da55394fb324349db0ede38114a46aafd0e7dfcabfecd28cdd94c3"}, - {file = "fonttools-4.46.0-cp38-cp38-win32.whl", hash = "sha256:f5f1423a504ccc329efb5aa79738de83d38c072be5308788dde6bd419969d7f5"}, - {file = "fonttools-4.46.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d4a4ebcc76e30898ff3296ea786491c70e183f738319ae2629e0d44f17ece42"}, - {file = "fonttools-4.46.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9a0e422ab79e5cb2b47913be6a4b5fd20c4c7ac34a24f3691a4e099e965e0b8"}, - {file = "fonttools-4.46.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:13ac0cba2fc63fa4b232f2a7971f35f35c6eaf10bd1271fa96d4ce6253a8acfd"}, - {file = "fonttools-4.46.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:795150d5edc595e1a2cfb3d65e8f4f3d027704fc2579f8990d381bef6b188eb6"}, - {file = "fonttools-4.46.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d00fc63131dcac6b25f50a5a129758438317e54e3ce5587163f7058de4b0e933"}, - {file = "fonttools-4.46.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3033b55f401a622de2630b3982234d97219d89b058607b87927eccb0f922313c"}, - {file = "fonttools-4.46.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e26e7fb908ae4f622813e7cb32cd2db6c24e3122bb3b98f25e832a2fe0e7e228"}, - {file = "fonttools-4.46.0-cp39-cp39-win32.whl", hash = "sha256:2d0eba685938c603f2f648dfc0aadbf8c6a4fe1c7ca608c2970a6ef39e00f254"}, - {file = "fonttools-4.46.0-cp39-cp39-win_amd64.whl", hash = "sha256:5200b01f463d97cc2b7ff8a1e3584151f4413e98cb8419da5f17d1dbb84cc214"}, - {file = "fonttools-4.46.0-py3-none-any.whl", hash = "sha256:5b627ed142398ea9202bd752c04311592558964d1a765fb2f78dc441a05633f4"}, - {file = "fonttools-4.46.0.tar.gz", hash = "sha256:2ae45716c27a41807d58a9f3f59983bdc8c0a46cb259e4450ab7e196253a9853"}, + {file = "fonttools-4.47.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2d2404107626f97a221dc1a65b05396d2bb2ce38e435f64f26ed2369f68675d9"}, + {file = "fonttools-4.47.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c01f409be619a9a0f5590389e37ccb58b47264939f0e8d58bfa1f3ba07d22671"}, + {file = "fonttools-4.47.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d986b66ff722ef675b7ee22fbe5947a41f60a61a4da15579d5e276d897fbc7fa"}, + 
{file = "fonttools-4.47.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8acf6dd0434b211b3bd30d572d9e019831aae17a54016629fa8224783b22df8"}, + {file = "fonttools-4.47.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:495369c660e0c27233e3c572269cbe520f7f4978be675f990f4005937337d391"}, + {file = "fonttools-4.47.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c59227d7ba5b232281c26ae04fac2c73a79ad0e236bca5c44aae904a18f14faf"}, + {file = "fonttools-4.47.0-cp310-cp310-win32.whl", hash = "sha256:59a6c8b71a245800e923cb684a2dc0eac19c56493e2f896218fcf2571ed28984"}, + {file = "fonttools-4.47.0-cp310-cp310-win_amd64.whl", hash = "sha256:52c82df66201f3a90db438d9d7b337c7c98139de598d0728fb99dab9fd0495ca"}, + {file = "fonttools-4.47.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:854421e328d47d70aa5abceacbe8eef231961b162c71cbe7ff3f47e235e2e5c5"}, + {file = "fonttools-4.47.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:511482df31cfea9f697930f61520f6541185fa5eeba2fa760fe72e8eee5af88b"}, + {file = "fonttools-4.47.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0e2c88c8c985b7b9a7efcd06511fb0a1fe3ddd9a6cd2895ef1dbf9059719d7"}, + {file = "fonttools-4.47.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7a0a8848726956e9d9fb18c977a279013daadf0cbb6725d2015a6dd57527992"}, + {file = "fonttools-4.47.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e869da810ae35afb3019baa0d0306cdbab4760a54909c89ad8904fa629991812"}, + {file = "fonttools-4.47.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dd23848f877c3754f53a4903fb7a593ed100924f9b4bff7d5a4e2e8a7001ae11"}, + {file = "fonttools-4.47.0-cp311-cp311-win32.whl", hash = "sha256:bf1810635c00f7c45d93085611c995fc130009cec5abdc35b327156aa191f982"}, + {file = "fonttools-4.47.0-cp311-cp311-win_amd64.whl", hash = "sha256:61df4dee5d38ab65b26da8efd62d859a1eef7a34dcbc331299a28e24d04c59a7"}, + {file = "fonttools-4.47.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e3f4d61f3a8195eac784f1d0c16c0a3105382c1b9a74d99ac4ba421da39a8826"}, + {file = "fonttools-4.47.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:174995f7b057e799355b393e97f4f93ef1f2197cbfa945e988d49b2a09ecbce8"}, + {file = "fonttools-4.47.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea592e6a09b71cb7a7661dd93ac0b877a6228e2d677ebacbad0a4d118494c86d"}, + {file = "fonttools-4.47.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40bdbe90b33897d9cc4a39f8e415b0fcdeae4c40a99374b8a4982f127ff5c767"}, + {file = "fonttools-4.47.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:843509ae9b93db5aaf1a6302085e30bddc1111d31e11d724584818f5b698f500"}, + {file = "fonttools-4.47.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9acfa1cdc479e0dde528b61423855913d949a7f7fe09e276228298fef4589540"}, + {file = "fonttools-4.47.0-cp312-cp312-win32.whl", hash = "sha256:66c92ec7f95fd9732550ebedefcd190a8d81beaa97e89d523a0d17198a8bda4d"}, + {file = "fonttools-4.47.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8fa20748de55d0021f83754b371432dca0439e02847962fc4c42a0e444c2d78"}, + {file = "fonttools-4.47.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c75e19971209fbbce891ebfd1b10c37320a5a28e8d438861c21d35305aedb81c"}, + {file = "fonttools-4.47.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e79f1a3970d25f692bbb8c8c2637e621a66c0d60c109ab48d4a160f50856deff"}, + {file = 
"fonttools-4.47.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:562681188c62c024fe2c611b32e08b8de2afa00c0c4e72bed47c47c318e16d5c"}, + {file = "fonttools-4.47.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a77a60315c33393b2bd29d538d1ef026060a63d3a49a9233b779261bad9c3f71"}, + {file = "fonttools-4.47.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4fabb8cc9422efae1a925160083fdcbab8fdc96a8483441eb7457235df625bd"}, + {file = "fonttools-4.47.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2a78dba8c2a1e9d53a0fb5382979f024200dc86adc46a56cbb668a2249862fda"}, + {file = "fonttools-4.47.0-cp38-cp38-win32.whl", hash = "sha256:e6b968543fde4119231c12c2a953dcf83349590ca631ba8216a8edf9cd4d36a9"}, + {file = "fonttools-4.47.0-cp38-cp38-win_amd64.whl", hash = "sha256:4a9a51745c0439516d947480d4d884fa18bd1458e05b829e482b9269afa655bc"}, + {file = "fonttools-4.47.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:62d8ddb058b8e87018e5dc26f3258e2c30daad4c87262dfeb0e2617dd84750e6"}, + {file = "fonttools-4.47.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5dde0eab40faaa5476133123f6a622a1cc3ac9b7af45d65690870620323308b4"}, + {file = "fonttools-4.47.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4da089f6dfdb822293bde576916492cd708c37c2501c3651adde39804630538"}, + {file = "fonttools-4.47.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:253bb46bab970e8aae254cebf2ae3db98a4ef6bd034707aa68a239027d2b198d"}, + {file = "fonttools-4.47.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1193fb090061efa2f9e2d8d743ae9850c77b66746a3b32792324cdce65784154"}, + {file = "fonttools-4.47.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:084511482dd265bce6dca24c509894062f0117e4e6869384d853f46c0e6d43be"}, + {file = "fonttools-4.47.0-cp39-cp39-win32.whl", hash = "sha256:97620c4af36e4c849e52661492e31dc36916df12571cb900d16960ab8e92a980"}, + {file = "fonttools-4.47.0-cp39-cp39-win_amd64.whl", hash = "sha256:e77bdf52185bdaf63d39f3e1ac3212e6cfa3ab07d509b94557a8902ce9c13c82"}, + {file = "fonttools-4.47.0-py3-none-any.whl", hash = "sha256:d6477ba902dd2d7adda7f0fd3bfaeb92885d45993c9e1928c9f28fc3961415f7"}, + {file = "fonttools-4.47.0.tar.gz", hash = "sha256:ec13a10715eef0e031858c1c23bfaee6cba02b97558e4a7bfa089dba4a8c2ebf"}, ] [package.extras] -all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] graphite = ["lz4 (>=1.7.4.2)"] -interpolatable = ["munkres", "scipy"] +interpolatable = ["munkres", "pycairo", "scipy"] lxml = ["lxml (>=4.0,<5)"] pathops = ["skia-pathops (>=0.5.0)"] plot = ["matplotlib"] @@ -1565,19 +1431,15 @@ files = [ [[package]] name = "fsspec" -version = "2023.10.0" +version = "2023.12.2" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2023.10.0-py3-none-any.whl", hash = "sha256:346a8f024efeb749d2a5fca7ba8854474b1ff9af7c3faaf636a4548781136529"}, - {file = "fsspec-2023.10.0.tar.gz", hash = 
"sha256:330c66757591df346ad3091a53bd907e15348c2ba17d63fd54f5c39c4457d2a5"}, + {file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"}, + {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"}, ] -[package.dependencies] -aiohttp = {version = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1", optional = true, markers = "extra == \"http\""} -requests = {version = "*", optional = true, markers = "extra == \"http\""} - [package.extras] abfs = ["adlfs"] adl = ["adlfs"] @@ -1678,73 +1540,73 @@ test = ["coverage", "mock (>=4)", "pytest (>=7)", "pytest-cov", "pytest-mock (>= [[package]] name = "greenlet" -version = "3.0.2" +version = "3.0.3" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.0.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9acd8fd67c248b8537953cb3af8787c18a87c33d4dcf6830e410ee1f95a63fd4"}, - {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:339c0272a62fac7e602e4e6ec32a64ff9abadc638b72f17f6713556ed011d493"}, - {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38878744926cec29b5cc3654ef47f3003f14bfbba7230e3c8492393fe29cc28b"}, - {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b3f0497db77cfd034f829678b28267eeeeaf2fc21b3f5041600f7617139e6773"}, - {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1a8a08de7f68506a38f9a2ddb26bbd1480689e66d788fcd4b5f77e2d9ecfcc"}, - {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:89a6f6ddcbef4000cda7e205c4c20d319488ff03db961d72d4e73519d2465309"}, - {file = "greenlet-3.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c1f647fe5b94b51488b314c82fdda10a8756d650cee8d3cd29f657c6031bdf73"}, - {file = "greenlet-3.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9560c580c896030ff9c311c603aaf2282234643c90d1dec738a1d93e3e53cd51"}, - {file = "greenlet-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2e9c5423046eec21f6651268cb674dfba97280701e04ef23d312776377313206"}, - {file = "greenlet-3.0.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1fd25dfc5879a82103b3d9e43fa952e3026c221996ff4d32a9c72052544835d"}, - {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfdc950dd25f25d6582952e58521bca749cf3eeb7a9bad69237024308c8196"}, - {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edf7a1daba1f7c54326291a8cde58da86ab115b78c91d502be8744f0aa8e3ffa"}, - {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4cf532bf3c58a862196b06947b1b5cc55503884f9b63bf18582a75228d9950e"}, - {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e79fb5a9fb2d0bd3b6573784f5e5adabc0b0566ad3180a028af99523ce8f6138"}, - {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:006c1028ac0cfcc4e772980cfe73f5476041c8c91d15d64f52482fc571149d46"}, - {file = "greenlet-3.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fefd5eb2c0b1adffdf2802ff7df45bfe65988b15f6b972706a0e55d451bffaea"}, - {file = "greenlet-3.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:0c0fdb8142742ee68e97c106eb81e7d3e883cc739d9c5f2b28bc38a7bafeb6d1"}, - {file = "greenlet-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:8f8d14a0a4e8c670fbce633d8b9a1ee175673a695475acd838e372966845f764"}, - {file = "greenlet-3.0.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:654b84c9527182036747938b81938f1d03fb8321377510bc1854a9370418ab66"}, - {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bc4fde0842ff2b9cf33382ad0b4db91c2582db836793d58d174c569637144"}, - {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c27b142a9080bdd5869a2fa7ebf407b3c0b24bd812db925de90e9afe3c417fd6"}, - {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0df7eed98ea23b20e9db64d46eb05671ba33147df9405330695bcd81a73bb0c9"}, - {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb5d60805057d8948065338be6320d35e26b0a72f45db392eb32b70dd6dc9227"}, - {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e0e28f5233d64c693382f66d47c362b72089ebf8ac77df7e12ac705c9fa1163d"}, - {file = "greenlet-3.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e4bfa752b3688d74ab1186e2159779ff4867644d2b1ebf16db14281f0445377"}, - {file = "greenlet-3.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c42bb589e6e9f9d8bdd79f02f044dff020d30c1afa6e84c0b56d1ce8a324553c"}, - {file = "greenlet-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:b2cedf279ca38ef3f4ed0d013a6a84a7fc3d9495a716b84a5fc5ff448965f251"}, - {file = "greenlet-3.0.2-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:6d65bec56a7bc352bcf11b275b838df618651109074d455a772d3afe25390b7d"}, - {file = "greenlet-3.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0acadbc3f72cb0ee85070e8d36bd2a4673d2abd10731ee73c10222cf2dd4713c"}, - {file = "greenlet-3.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14b5d999aefe9ffd2049ad19079f733c3aaa426190ffecadb1d5feacef8fe397"}, - {file = "greenlet-3.0.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f27aa32466993c92d326df982c4acccd9530fe354e938d9e9deada563e71ce76"}, - {file = "greenlet-3.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f34a765c5170c0673eb747213a0275ecc749ab3652bdbec324621ed5b2edaef"}, - {file = "greenlet-3.0.2-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:520fcb53a39ef90f5021c77606952dbbc1da75d77114d69b8d7bded4a8e1a813"}, - {file = "greenlet-3.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1fceb5351ab1601903e714c3028b37f6ea722be6873f46e349a960156c05650"}, - {file = "greenlet-3.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7363756cc439a503505b67983237d1cc19139b66488263eb19f5719a32597836"}, - {file = "greenlet-3.0.2-cp37-cp37m-win32.whl", hash = "sha256:d5547b462b8099b84746461e882a3eb8a6e3f80be46cb6afb8524eeb191d1a30"}, - {file = "greenlet-3.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:950e21562818f9c771989b5b65f990e76f4ac27af66e1bb34634ae67886ede2a"}, - {file = "greenlet-3.0.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d64643317e76b4b41fdba659e7eca29634e5739b8bc394eda3a9127f697ed4b0"}, - {file = "greenlet-3.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f9ea7c2c9795549653b6f7569f6bc75d2c7d1f6b2854eb8ce0bc6ec3cb2dd88"}, - {file = 
"greenlet-3.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db4233358d3438369051a2f290f1311a360d25c49f255a6c5d10b5bcb3aa2b49"}, - {file = "greenlet-3.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed9bf77b41798e8417657245b9f3649314218a4a17aefb02bb3992862df32495"}, - {file = "greenlet-3.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d0df07a38e41a10dfb62c6fc75ede196572b580f48ee49b9282c65639f3965"}, - {file = "greenlet-3.0.2-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10d247260db20887ae8857c0cbc750b9170f0b067dd7d38fb68a3f2334393bd3"}, - {file = "greenlet-3.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a37ae53cca36823597fd5f65341b6f7bac2dd69ecd6ca01334bb795460ab150b"}, - {file = "greenlet-3.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:80d068e4b6e2499847d916ef64176811ead6bf210a610859220d537d935ec6fd"}, - {file = "greenlet-3.0.2-cp38-cp38-win32.whl", hash = "sha256:b1405614692ac986490d10d3e1a05e9734f473750d4bee3cf7d1286ef7af7da6"}, - {file = "greenlet-3.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8756a94ed8f293450b0e91119eca2a36332deba69feb2f9ca410d35e74eae1e4"}, - {file = "greenlet-3.0.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:2c93cd03acb1499ee4de675e1a4ed8eaaa7227f7949dc55b37182047b006a7aa"}, - {file = "greenlet-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dac09e3c0b78265d2e6d3cbac2d7c48bd1aa4b04a8ffeda3adde9f1688df2c3"}, - {file = "greenlet-3.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ee59c4627c8c4bb3e15949fbcd499abd6b7f4ad9e0bfcb62c65c5e2cabe0ec4"}, - {file = "greenlet-3.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18fe39d70d482b22f0014e84947c5aaa7211fb8e13dc4cc1c43ed2aa1db06d9a"}, - {file = "greenlet-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84bef3cfb6b6bfe258c98c519811c240dbc5b33a523a14933a252e486797c90"}, - {file = "greenlet-3.0.2-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aecea0442975741e7d69daff9b13c83caff8c13eeb17485afa65f6360a045765"}, - {file = "greenlet-3.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f260e6c2337871a52161824058923df2bbddb38bc11a5cbe71f3474d877c5bd9"}, - {file = "greenlet-3.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fc14dd9554f88c9c1fe04771589ae24db76cd56c8f1104e4381b383d6b71aff8"}, - {file = "greenlet-3.0.2-cp39-cp39-win32.whl", hash = "sha256:bfcecc984d60b20ffe30173b03bfe9ba6cb671b0be1e95c3e2056d4fe7006590"}, - {file = "greenlet-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:c235131bf59d2546bb3ebaa8d436126267392f2e51b85ff45ac60f3a26549af0"}, - {file = "greenlet-3.0.2.tar.gz", hash = "sha256:1c1129bc47266d83444c85a8e990ae22688cf05fb20d7951fd2866007c2ba9bc"}, + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + 
{file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = 
"greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, 
+ {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, ] [package.extras] -docs = ["Sphinx"] +docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] [[package]] @@ -1813,6 +1675,74 @@ files = [ [package.extras] protobuf = ["grpcio-tools (>=1.60.0)"] +[[package]] +name = "grpcio-tools" +version = "1.60.0" +description = "Protobuf code generator for gRPC" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-tools-1.60.0.tar.gz", hash = "sha256:ed30499340228d733ff69fcf4a66590ed7921f94eb5a2bf692258b1280b9dac7"}, + {file = "grpcio_tools-1.60.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:6807b7a3f3e6e594566100bd7fe04a2c42ce6d5792652677f1aaf5aa5adaef3d"}, + {file = "grpcio_tools-1.60.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:857c5351e9dc33a019700e171163f94fcc7e3ae0f6d2b026b10fda1e3c008ef1"}, + {file = "grpcio_tools-1.60.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:ec0e401e9a43d927d216d5169b03c61163fb52b665c5af2fed851357b15aef88"}, + {file = "grpcio_tools-1.60.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e68dc4474f30cad11a965f0eb5d37720a032b4720afa0ec19dbcea2de73b5aae"}, + {file = "grpcio_tools-1.60.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbf0ed772d2ae7e8e5d7281fcc00123923ab130b94f7a843eee9af405918f924"}, + {file = "grpcio_tools-1.60.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c771b19dce2bfe06899247168c077d7ab4e273f6655d8174834f9a6034415096"}, + {file = "grpcio_tools-1.60.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e5614cf0960456d21d8a0f4902e3e5e3bcacc4e400bf22f196e5dd8aabb978b7"}, + {file = "grpcio_tools-1.60.0-cp310-cp310-win32.whl", hash = "sha256:87cf439178f3eb45c1a889b2e4a17cbb4c450230d92c18d9c57e11271e239c55"}, + {file = "grpcio_tools-1.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:687f576d7ff6ce483bc9a196d1ceac45144e8733b953620a026daed8e450bc38"}, + {file = "grpcio_tools-1.60.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2a8a758701f3ac07ed85f5a4284c6a9ddefcab7913a8e552497f919349e72438"}, + {file = "grpcio_tools-1.60.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:7c1cde49631732356cb916ee1710507967f19913565ed5f9991e6c9cb37e3887"}, + {file = 
"grpcio_tools-1.60.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:d941749bd8dc3f8be58fe37183143412a27bec3df8482d5abd6b4ec3f1ac2924"}, + {file = "grpcio_tools-1.60.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ee35234f1da8fba7ddbc544856ff588243f1128ea778d7a1da3039be829a134"}, + {file = "grpcio_tools-1.60.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8f7a5094adb49e85db13ea3df5d99a976c2bdfd83b0ba26af20ebb742ac6786"}, + {file = "grpcio_tools-1.60.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:24c4ead4a03037beaeb8ef2c90d13d70101e35c9fae057337ed1a9144ef10b53"}, + {file = "grpcio_tools-1.60.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:811abb9c4fb6679e0058dfa123fb065d97b158b71959c0e048e7972bbb82ba0f"}, + {file = "grpcio_tools-1.60.0-cp311-cp311-win32.whl", hash = "sha256:bd2a17b0193fbe4793c215d63ce1e01ae00a8183d81d7c04e77e1dfafc4b2b8a"}, + {file = "grpcio_tools-1.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:b22b1299b666eebd5752ba7719da536075eae3053abcf2898b65f763c314d9da"}, + {file = "grpcio_tools-1.60.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:74025fdd6d1cb7ba4b5d087995339e9a09f0c16cf15dfe56368b23e41ffeaf7a"}, + {file = "grpcio_tools-1.60.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:5a907a4f1ffba86501b2cdb8682346249ea032b922fc69a92f082ba045cca548"}, + {file = "grpcio_tools-1.60.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:1fbb9554466d560472f07d906bfc8dcaf52f365c2a407015185993e30372a886"}, + {file = "grpcio_tools-1.60.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f10ef47460ce3c6fd400f05fe757b90df63486c9b84d1ecad42dcc5f80c8ac14"}, + {file = "grpcio_tools-1.60.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:321b18f42a70813545e416ddcb8bf20defa407a8114906711c9710a69596ceda"}, + {file = "grpcio_tools-1.60.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:081336d8258f1a56542aa8a7a5dec99a2b38d902e19fbdd744594783301b0210"}, + {file = "grpcio_tools-1.60.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:addc9b23d6ff729d9f83d4a2846292d4c84f5eb2ec38f08489a6a0d66ac2b91e"}, + {file = "grpcio_tools-1.60.0-cp312-cp312-win32.whl", hash = "sha256:e87cabac7969bdde309575edc2456357667a1b28262b2c1f12580ef48315b19d"}, + {file = "grpcio_tools-1.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:e70d867c120d9849093b0ac24d861e378bc88af2552e743d83b9f642d2caa7c2"}, + {file = "grpcio_tools-1.60.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:559ce714fe212aaf4abbe1493c5bb8920def00cc77ce0d45266f4fd9d8b3166f"}, + {file = "grpcio_tools-1.60.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:7a5263a0f2ddb7b1cfb2349e392cfc4f318722e0f48f886393e06946875d40f3"}, + {file = "grpcio_tools-1.60.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:18976684a931ca4bcba65c78afa778683aefaae310f353e198b1823bf09775a0"}, + {file = "grpcio_tools-1.60.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5c519a0d4ba1ab44a004fa144089738c59278233e2010b2cf4527dc667ff297"}, + {file = "grpcio_tools-1.60.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6170873b1e5b6580ebb99e87fb6e4ea4c48785b910bd7af838cc6e44b2bccb04"}, + {file = "grpcio_tools-1.60.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fb4df80868b3e397d5fbccc004c789d2668b622b51a9d2387b4c89c80d31e2c5"}, + {file = "grpcio_tools-1.60.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:dba6e32c87b4af29b5f475fb2f470f7ee3140bfc128644f17c6c59ddeb670680"}, + {file = "grpcio_tools-1.60.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f610384dee4b1ca705e8da66c5b5fe89a2de3d165c5282c3d1ddf40cb18924e4"}, + {file = "grpcio_tools-1.60.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:4041538f55aad5b3ae7e25ab314d7995d689e968bfc8aa169d939a3160b1e4c6"}, + {file = "grpcio_tools-1.60.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:2fb4cf74bfe1e707cf10bc9dd38a1ebaa145179453d150febb121c7e9cd749bf"}, + {file = "grpcio_tools-1.60.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:2fd1671c52f96e79a2302c8b1c1f78b8a561664b8b3d6946f20d8f1cc6b4225a"}, + {file = "grpcio_tools-1.60.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd1e68c232fe01dd5312a8dbe52c50ecd2b5991d517d7f7446af4ba6334ba872"}, + {file = "grpcio_tools-1.60.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17a32b3da4fc0798cdcec0a9c974ac2a1e98298f151517bf9148294a3b1a5742"}, + {file = "grpcio_tools-1.60.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9970d384fb0c084b00945ef57d98d57a8d32be106d8f0bd31387f7cbfe411b5b"}, + {file = "grpcio_tools-1.60.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5ce6bbd4936977ec1114f2903eb4342781960d521b0d82f73afedb9335251f6f"}, + {file = "grpcio_tools-1.60.0-cp38-cp38-win32.whl", hash = "sha256:2e00de389729ca8d8d1a63c2038703078a887ff738dc31be640b7da9c26d0d4f"}, + {file = "grpcio_tools-1.60.0-cp38-cp38-win_amd64.whl", hash = "sha256:6192184b1f99372ff1d9594bd4b12264e3ff26440daba7eb043726785200ff77"}, + {file = "grpcio_tools-1.60.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:eae27f9b16238e2aaee84c77b5923c6924d6dccb0bdd18435bf42acc8473ae1a"}, + {file = "grpcio_tools-1.60.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:b96981f3a31b85074b73d97c8234a5ed9053d65a36b18f4a9c45a2120a5b7a0a"}, + {file = "grpcio_tools-1.60.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:1748893efd05cf4a59a175d7fa1e4fbb652f4d84ccaa2109f7869a2be48ed25e"}, + {file = "grpcio_tools-1.60.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a6fe752205caae534f29fba907e2f59ff79aa42c6205ce9a467e9406cbac68c"}, + {file = "grpcio_tools-1.60.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3456df087ea61a0972a5bc165aed132ed6ddcc63f5749e572f9fff84540bdbad"}, + {file = "grpcio_tools-1.60.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f3d916606dcf5610d4367918245b3d9d8cd0d2ec0b7043d1bbb8c50fe9815c3a"}, + {file = "grpcio_tools-1.60.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fc01bc1079279ec342f0f1b6a107b3f5dc3169c33369cf96ada6e2e171f74e86"}, + {file = "grpcio_tools-1.60.0-cp39-cp39-win32.whl", hash = "sha256:2dd01257e4feff986d256fa0bac9f56de59dc735eceeeb83de1c126e2e91f653"}, + {file = "grpcio_tools-1.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:1b93ae8ffd18e9af9a965ebca5fa521e89066267de7abdde20721edc04e42721"}, +] + +[package.dependencies] +grpcio = ">=1.60.0" +protobuf = ">=4.21.6,<5.0dev" +setuptools = "*" + [[package]] name = "gunicorn" version = "20.1.0" @@ -1894,13 +1824,13 @@ trio = ["trio (>=0.22.0,<0.23.0)"] [[package]] name = "httpx" -version = "0.25.2" +version = "0.26.0" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.25.2-py3-none-any.whl", hash = "sha256:a05d3d052d9b2dfce0e3896636467f8a5342fb2b902c819428e1ac65413ca118"}, - {file = "httpx-0.25.2.tar.gz", hash = "sha256:8b8fcaa0c8ea7b05edd69a094e63a2094c4efcb48129fb757361bc423c0ad9e8"}, + {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, + {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, ] [package.dependencies] @@ -1918,13 +1848,13 @@ socks = ["socksio (==1.*)"] [[package]] name = "huggingface-hub" -version = "0.19.4" +version = "0.20.1" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.8.0" files = [ - {file = "huggingface_hub-0.19.4-py3-none-any.whl", hash = "sha256:dba013f779da16f14b606492828f3760600a1e1801432d09fe1c33e50b825bb5"}, - {file = "huggingface_hub-0.19.4.tar.gz", hash = "sha256:176a4fc355a851c17550e7619488f383189727eab209534d7cef2114dae77b22"}, + {file = "huggingface_hub-0.20.1-py3-none-any.whl", hash = "sha256:ecfdea395a8bc68cd160106c5bd857f7e010768d95f9e1862a779010cc304831"}, + {file = "huggingface_hub-0.20.1.tar.gz", hash = "sha256:8c88c4c3c8853e22f2dfb4d84c3d493f4e1af52fb3856a90e1eeddcf191ddbb1"}, ] [package.dependencies] @@ -1937,15 +1867,14 @@ tqdm = ">=4.42.1" typing-extensions = ">=3.7.4.3" [package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -docs = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "hf-doc-builder", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)", "watchdog"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", 
"soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] inference = ["aiohttp", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)"] quality = ["mypy (==1.5.1)", "ruff (>=0.1.3)"] tensorflow = ["graphviz", "pydot", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] torch = ["torch"] typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] @@ -1990,13 +1919,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.0" +version = "7.0.1" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.0-py3-none-any.whl", hash = "sha256:d97503976bb81f40a193d41ee6570868479c69d5068651eb039c40d850c59d67"}, - {file = "importlib_metadata-7.0.0.tar.gz", hash = "sha256:7fc841f8b8332803464e5dc1c63a2e59121f46ca186c0e2e182e80bf8c1319f7"}, + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, ] [package.dependencies] @@ -2007,17 +1936,6 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.link perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - [[package]] name = "instructor" version = "0.3.5" @@ -2053,6 +1971,16 @@ typing_extensions = "*" [package.extras] aws = ["boto3"] +[[package]] +name = "iso639" +version = "0.1.4" +description = "ISO639-2 support for Python." +optional = false +python-versions = "*" +files = [ + {file = "iso639-0.1.4.tar.gz", hash = "sha256:88b70cf6c64ee9c2c2972292818c8beb32db9ea6f4de1f8471a9b081a3d92e98"}, +] + [[package]] name = "itsdangerous" version = "2.1.2" @@ -2325,13 +2253,13 @@ six = "*" [[package]] name = "langsmith" -version = "0.0.70" +version = "0.0.75" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.70-py3-none-any.whl", hash = "sha256:a0d4cac3af94fe44c2ef3814c32b6740f92aebe267e395d62e62040bc5bad343"}, - {file = "langsmith-0.0.70.tar.gz", hash = "sha256:3a546c45e67f6600d6669ef63f1f58b772e505703126338ad4f22fe0e2bbf677"}, + {file = "langsmith-0.0.75-py3-none-any.whl", hash = "sha256:3e008854204c5eaae007f34c7e249059218605689c385c037f6a40cac044833b"}, + {file = "langsmith-0.0.75.tar.gz", hash = "sha256:3fd44c58bd53cb9366af3de129c7f11b6947914f1bb598a585240df0e2c566eb"}, ] [package.dependencies] @@ -2438,110 +2366,111 @@ dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptio [[package]] name = "lxml" -version = "4.9.3" +version = "4.9.4" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ - {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, - {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, - {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, - {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, - {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, - {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, - {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, - {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, - {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, - {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, - {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, - {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, - {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, - {file = 
"lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, - {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, - {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, - {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, - {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, - {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, - {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, - {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, - {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, - {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, - {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, - {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, - {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, - {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, - {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, - {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, - {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, - {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, - {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, - {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, - {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, - {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, - {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, - {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, - {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, - {file = 
"lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, - {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, - {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, - {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, - {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, - {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, - {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, - {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, - {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, - {file = 
"lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, - {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, + {file = "lxml-4.9.4-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e214025e23db238805a600f1f37bf9f9a15413c7bf5f9d6ae194f84980c78722"}, + {file = "lxml-4.9.4-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ec53a09aee61d45e7dbe7e91252ff0491b6b5fee3d85b2d45b173d8ab453efc1"}, + {file = "lxml-4.9.4-cp27-cp27m-win32.whl", hash = "sha256:7d1d6c9e74c70ddf524e3c09d9dc0522aba9370708c2cb58680ea40174800013"}, + {file = "lxml-4.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:cb53669442895763e61df5c995f0e8361b61662f26c1b04ee82899c2789c8f69"}, + {file = "lxml-4.9.4-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:647bfe88b1997d7ae8d45dabc7c868d8cb0c8412a6e730a7651050b8c7289cf2"}, + {file = "lxml-4.9.4-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4d973729ce04784906a19108054e1fd476bc85279a403ea1a72fdb051c76fa48"}, + {file = "lxml-4.9.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:056a17eaaf3da87a05523472ae84246f87ac2f29a53306466c22e60282e54ff8"}, + {file = "lxml-4.9.4-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:aaa5c173a26960fe67daa69aa93d6d6a1cd714a6eb13802d4e4bd1d24a530644"}, + {file = "lxml-4.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:647459b23594f370c1c01768edaa0ba0959afc39caeeb793b43158bb9bb6a663"}, + {file = "lxml-4.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:bdd9abccd0927673cffe601d2c6cdad1c9321bf3437a2f507d6b037ef91ea307"}, + {file = "lxml-4.9.4-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:00e91573183ad273e242db5585b52670eddf92bacad095ce25c1e682da14ed91"}, + {file = "lxml-4.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a602ed9bd2c7d85bd58592c28e101bd9ff9c718fbde06545a70945ffd5d11868"}, + {file = "lxml-4.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:de362ac8bc962408ad8fae28f3967ce1a262b5d63ab8cefb42662566737f1dc7"}, + {file = "lxml-4.9.4-cp310-cp310-win32.whl", hash = "sha256:33714fcf5af4ff7e70a49731a7cc8fd9ce910b9ac194f66eaa18c3cc0a4c02be"}, + {file = "lxml-4.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:d3caa09e613ece43ac292fbed513a4bce170681a447d25ffcbc1b647d45a39c5"}, + {file = "lxml-4.9.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:359a8b09d712df27849e0bcb62c6a3404e780b274b0b7e4c39a88826d1926c28"}, + {file = "lxml-4.9.4-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:43498ea734ccdfb92e1886dfedaebeb81178a241d39a79d5351ba2b671bff2b2"}, + {file = 
"lxml-4.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4855161013dfb2b762e02b3f4d4a21cc7c6aec13c69e3bffbf5022b3e708dd97"}, + {file = "lxml-4.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c71b5b860c5215fdbaa56f715bc218e45a98477f816b46cfde4a84d25b13274e"}, + {file = "lxml-4.9.4-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9a2b5915c333e4364367140443b59f09feae42184459b913f0f41b9fed55794a"}, + {file = "lxml-4.9.4-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d82411dbf4d3127b6cde7da0f9373e37ad3a43e89ef374965465928f01c2b979"}, + {file = "lxml-4.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:273473d34462ae6e97c0f4e517bd1bf9588aa67a1d47d93f760a1282640e24ac"}, + {file = "lxml-4.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:389d2b2e543b27962990ab529ac6720c3dded588cc6d0f6557eec153305a3622"}, + {file = "lxml-4.9.4-cp311-cp311-win32.whl", hash = "sha256:8aecb5a7f6f7f8fe9cac0bcadd39efaca8bbf8d1bf242e9f175cbe4c925116c3"}, + {file = "lxml-4.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:c7721a3ef41591341388bb2265395ce522aba52f969d33dacd822da8f018aff8"}, + {file = "lxml-4.9.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:dbcb2dc07308453db428a95a4d03259bd8caea97d7f0776842299f2d00c72fc8"}, + {file = "lxml-4.9.4-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:01bf1df1db327e748dcb152d17389cf6d0a8c5d533ef9bab781e9d5037619229"}, + {file = "lxml-4.9.4-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e8f9f93a23634cfafbad6e46ad7d09e0f4a25a2400e4a64b1b7b7c0fbaa06d9d"}, + {file = "lxml-4.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3f3f00a9061605725df1816f5713d10cd94636347ed651abdbc75828df302b20"}, + {file = "lxml-4.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:953dd5481bd6252bd480d6ec431f61d7d87fdcbbb71b0d2bdcfc6ae00bb6fb10"}, + {file = "lxml-4.9.4-cp312-cp312-win32.whl", hash = "sha256:266f655d1baff9c47b52f529b5f6bec33f66042f65f7c56adde3fcf2ed62ae8b"}, + {file = "lxml-4.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:f1faee2a831fe249e1bae9cbc68d3cd8a30f7e37851deee4d7962b17c410dd56"}, + {file = "lxml-4.9.4-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:23d891e5bdc12e2e506e7d225d6aa929e0a0368c9916c1fddefab88166e98b20"}, + {file = "lxml-4.9.4-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e96a1788f24d03e8d61679f9881a883ecdf9c445a38f9ae3f3f193ab6c591c66"}, + {file = "lxml-4.9.4-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:5557461f83bb7cc718bc9ee1f7156d50e31747e5b38d79cf40f79ab1447afd2d"}, + {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:fdb325b7fba1e2c40b9b1db407f85642e32404131c08480dd652110fc908561b"}, + {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d74d4a3c4b8f7a1f676cedf8e84bcc57705a6d7925e6daef7a1e54ae543a197"}, + {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ac7674d1638df129d9cb4503d20ffc3922bd463c865ef3cb412f2c926108e9a4"}, + {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:ddd92e18b783aeb86ad2132d84a4b795fc5ec612e3545c1b687e7747e66e2b53"}, + {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bd9ac6e44f2db368ef8986f3989a4cad3de4cd55dbdda536e253000c801bcc7"}, + {file = 
"lxml-4.9.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bc354b1393dce46026ab13075f77b30e40b61b1a53e852e99d3cc5dd1af4bc85"}, + {file = "lxml-4.9.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f836f39678cb47c9541f04d8ed4545719dc31ad850bf1832d6b4171e30d65d23"}, + {file = "lxml-4.9.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:9c131447768ed7bc05a02553d939e7f0e807e533441901dd504e217b76307745"}, + {file = "lxml-4.9.4-cp36-cp36m-win32.whl", hash = "sha256:bafa65e3acae612a7799ada439bd202403414ebe23f52e5b17f6ffc2eb98c2be"}, + {file = "lxml-4.9.4-cp36-cp36m-win_amd64.whl", hash = "sha256:6197c3f3c0b960ad033b9b7d611db11285bb461fc6b802c1dd50d04ad715c225"}, + {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:7b378847a09d6bd46047f5f3599cdc64fcb4cc5a5a2dd0a2af610361fbe77b16"}, + {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:1343df4e2e6e51182aad12162b23b0a4b3fd77f17527a78c53f0f23573663545"}, + {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6dbdacf5752fbd78ccdb434698230c4f0f95df7dd956d5f205b5ed6911a1367c"}, + {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:506becdf2ecaebaf7f7995f776394fcc8bd8a78022772de66677c84fb02dd33d"}, + {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca8e44b5ba3edb682ea4e6185b49661fc22b230cf811b9c13963c9f982d1d964"}, + {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9d9d5726474cbbef279fd709008f91a49c4f758bec9c062dfbba88eab00e3ff9"}, + {file = "lxml-4.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:bbdd69e20fe2943b51e2841fc1e6a3c1de460d630f65bde12452d8c97209464d"}, + {file = "lxml-4.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8671622256a0859f5089cbe0ce4693c2af407bc053dcc99aadff7f5310b4aa02"}, + {file = "lxml-4.9.4-cp37-cp37m-win32.whl", hash = "sha256:dd4fda67f5faaef4f9ee5383435048ee3e11ad996901225ad7615bc92245bc8e"}, + {file = "lxml-4.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6bee9c2e501d835f91460b2c904bc359f8433e96799f5c2ff20feebd9bb1e590"}, + {file = "lxml-4.9.4-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:1f10f250430a4caf84115b1e0f23f3615566ca2369d1962f82bef40dd99cd81a"}, + {file = "lxml-4.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3b505f2bbff50d261176e67be24e8909e54b5d9d08b12d4946344066d66b3e43"}, + {file = "lxml-4.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1449f9451cd53e0fd0a7ec2ff5ede4686add13ac7a7bfa6988ff6d75cff3ebe2"}, + {file = "lxml-4.9.4-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:4ece9cca4cd1c8ba889bfa67eae7f21d0d1a2e715b4d5045395113361e8c533d"}, + {file = "lxml-4.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59bb5979f9941c61e907ee571732219fa4774d5a18f3fa5ff2df963f5dfaa6bc"}, + {file = "lxml-4.9.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b1980dbcaad634fe78e710c8587383e6e3f61dbe146bcbfd13a9c8ab2d7b1192"}, + {file = "lxml-4.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9ae6c3363261021144121427b1552b29e7b59de9d6a75bf51e03bc072efb3c37"}, + {file = "lxml-4.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:bcee502c649fa6351b44bb014b98c09cb00982a475a1912a9881ca28ab4f9cd9"}, + {file = "lxml-4.9.4-cp38-cp38-win32.whl", hash = "sha256:a8edae5253efa75c2fc79a90068fe540b197d1c7ab5803b800fccfe240eed33c"}, + {file = "lxml-4.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:701847a7aaefef121c5c0d855b2affa5f9bd45196ef00266724a80e439220e46"}, + {file = "lxml-4.9.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:f610d980e3fccf4394ab3806de6065682982f3d27c12d4ce3ee46a8183d64a6a"}, + {file = "lxml-4.9.4-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:aa9b5abd07f71b081a33115d9758ef6077924082055005808f68feccb27616bd"}, + {file = "lxml-4.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:365005e8b0718ea6d64b374423e870648ab47c3a905356ab6e5a5ff03962b9a9"}, + {file = "lxml-4.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:16b9ec51cc2feab009e800f2c6327338d6ee4e752c76e95a35c4465e80390ccd"}, + {file = "lxml-4.9.4-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a905affe76f1802edcac554e3ccf68188bea16546071d7583fb1b693f9cf756b"}, + {file = "lxml-4.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fd814847901df6e8de13ce69b84c31fc9b3fb591224d6762d0b256d510cbf382"}, + {file = "lxml-4.9.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:91bbf398ac8bb7d65a5a52127407c05f75a18d7015a270fdd94bbcb04e65d573"}, + {file = "lxml-4.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f99768232f036b4776ce419d3244a04fe83784bce871b16d2c2e984c7fcea847"}, + {file = "lxml-4.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bb5bd6212eb0edfd1e8f254585290ea1dadc3687dd8fd5e2fd9a87c31915cdab"}, + {file = "lxml-4.9.4-cp39-cp39-win32.whl", hash = "sha256:88f7c383071981c74ec1998ba9b437659e4fd02a3c4a4d3efc16774eb108d0ec"}, + {file = "lxml-4.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:936e8880cc00f839aa4173f94466a8406a96ddce814651075f95837316369899"}, + {file = "lxml-4.9.4-pp310-pypy310_pp73-macosx_11_0_x86_64.whl", hash = "sha256:f6c35b2f87c004270fa2e703b872fcc984d714d430b305145c39d53074e1ffe0"}, + {file = "lxml-4.9.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:606d445feeb0856c2b424405236a01c71af7c97e5fe42fbc778634faef2b47e4"}, + {file = "lxml-4.9.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1bdcbebd4e13446a14de4dd1825f1e778e099f17f79718b4aeaf2403624b0f7"}, + {file = "lxml-4.9.4-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0a08c89b23117049ba171bf51d2f9c5f3abf507d65d016d6e0fa2f37e18c0fc5"}, + {file = "lxml-4.9.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:232fd30903d3123be4c435fb5159938c6225ee8607b635a4d3fca847003134ba"}, + {file = "lxml-4.9.4-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:231142459d32779b209aa4b4d460b175cadd604fed856f25c1571a9d78114771"}, + {file = "lxml-4.9.4-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:520486f27f1d4ce9654154b4494cf9307b495527f3a2908ad4cb48e4f7ed7ef7"}, + {file = "lxml-4.9.4-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:562778586949be7e0d7435fcb24aca4810913771f845d99145a6cee64d5b67ca"}, + {file = "lxml-4.9.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a9e7c6d89c77bb2770c9491d988f26a4b161d05c8ca58f63fb1f1b6b9a74be45"}, + {file = 
"lxml-4.9.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:786d6b57026e7e04d184313c1359ac3d68002c33e4b1042ca58c362f1d09ff58"}, + {file = "lxml-4.9.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95ae6c5a196e2f239150aa4a479967351df7f44800c93e5a975ec726fef005e2"}, + {file = "lxml-4.9.4-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:9b556596c49fa1232b0fff4b0e69b9d4083a502e60e404b44341e2f8fb7187f5"}, + {file = "lxml-4.9.4-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:cc02c06e9e320869d7d1bd323df6dd4281e78ac2e7f8526835d3d48c69060683"}, + {file = "lxml-4.9.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:857d6565f9aa3464764c2cb6a2e3c2e75e1970e877c188f4aeae45954a314e0c"}, + {file = "lxml-4.9.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c42ae7e010d7d6bc51875d768110c10e8a59494855c3d4c348b068f5fb81fdcd"}, + {file = "lxml-4.9.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f10250bb190fb0742e3e1958dd5c100524c2cc5096c67c8da51233f7448dc137"}, + {file = "lxml-4.9.4.tar.gz", hash = "sha256:b1541e50b78e15fa06a2670157a1962ef06591d4c998b998047fff5e3236880e"}, ] [package.extras] cssselect = ["cssselect (>=0.7)"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=0.29.35)"] +source = ["Cython (==0.29.37)"] [[package]] name = "makefun" @@ -2864,34 +2793,6 @@ files = [ {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, ] -[[package]] -name = "multiprocess" -version = "0.70.15" -description = "better multiprocessing and multithreading in Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multiprocess-0.70.15-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:aa36c7ed16f508091438687fe9baa393a7a8e206731d321e443745e743a0d4e5"}, - {file = "multiprocess-0.70.15-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:20e024018c46d0d1602024c613007ac948f9754659e3853b0aa705e83f6931d8"}, - {file = "multiprocess-0.70.15-pp37-pypy37_pp73-manylinux_2_24_i686.whl", hash = "sha256:e576062981c91f0fe8a463c3d52506e598dfc51320a8dd8d78b987dfca91c5db"}, - {file = "multiprocess-0.70.15-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:e73f497e6696a0f5433ada2b3d599ae733b87a6e8b008e387c62ac9127add177"}, - {file = "multiprocess-0.70.15-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:73db2e7b32dcc7f9b0f075c2ffa45c90b6729d3f1805f27e88534c8d321a1be5"}, - {file = "multiprocess-0.70.15-pp38-pypy38_pp73-manylinux_2_24_i686.whl", hash = "sha256:4271647bd8a49c28ecd6eb56a7fdbd3c212c45529ad5303b40b3c65fc6928e5f"}, - {file = "multiprocess-0.70.15-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:cf981fb998d6ec3208cb14f0cf2e9e80216e834f5d51fd09ebc937c32b960902"}, - {file = "multiprocess-0.70.15-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:18f9f2c7063346d1617bd1684fdcae8d33380ae96b99427260f562e1a1228b67"}, - {file = "multiprocess-0.70.15-pp39-pypy39_pp73-manylinux_2_24_i686.whl", hash = "sha256:0eac53214d664c49a34695e5824872db4006b1a465edd7459a251809c3773370"}, - {file = "multiprocess-0.70.15-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:1a51dd34096db47fb21fa2b839e615b051d51b97af9a67afbcdaa67186b44883"}, - {file = "multiprocess-0.70.15-py310-none-any.whl", hash = "sha256:7dd58e33235e83cf09d625e55cffd7b0f0eede7ee9223cdd666a87624f60c21a"}, - {file = "multiprocess-0.70.15-py311-none-any.whl", hash = 
"sha256:134f89053d82c9ed3b73edd3a2531eb791e602d4f4156fc92a79259590bd9670"}, - {file = "multiprocess-0.70.15-py37-none-any.whl", hash = "sha256:f7d4a1629bccb433114c3b4885f69eccc200994323c80f6feee73b0edc9199c5"}, - {file = "multiprocess-0.70.15-py38-none-any.whl", hash = "sha256:bee9afba476c91f9ebee7beeee0601face9eff67d822e893f9a893725fbd6316"}, - {file = "multiprocess-0.70.15-py39-none-any.whl", hash = "sha256:3e0953f5d52b4c76f1c973eaf8214554d146f2be5decb48e928e55c7a2d19338"}, - {file = "multiprocess-0.70.15.tar.gz", hash = "sha256:f20eed3036c0ef477b07a4177cf7c1ba520d9a2677870a4f47fe026f0cd6787e"}, -] - -[package.dependencies] -dill = ">=0.3.7" - [[package]] name = "murmurhash" version = "1.0.10" @@ -3425,37 +3326,102 @@ pypdfium2 = ">=4.18.0" [[package]] name = "pendulum" -version = "2.1.2" +version = "3.0.0" description = "Python datetimes made easy" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.8" files = [ - {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, - {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, - {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, - {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, - {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, - {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, - {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, - {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, - {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, - {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, - {file = 
"pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, - {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, + {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, + {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, + {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, + {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, + {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, + {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, + {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, + {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, + {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, + {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, + {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, + {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, + {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, + {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, + {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, + {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, + {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, + {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, + {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, + {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, + {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, + {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, + {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, + {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, + {file = 
"pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, + {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, + {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, + {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, + {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, + {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, + {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, + {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, + {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, + {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, ] [package.dependencies] -python-dateutil = ">=2.6,<3.0" -pytzdata = ">=2020.1" +python-dateutil = ">=2.6" +tzdata = ">=2020.1" + +[package.extras] +test = ["time-machine (>=2.6.0)"] [[package]] name = "pexpect" @@ -3578,21 +3544,6 @@ files = [ packaging = "*" tenacity = ">=6.2.0" -[[package]] -name = "pluggy" -version = "1.3.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - [[package]] name = "ply" version = "3.11" @@ -3671,58 
+3622,47 @@ murmurhash = ">=0.28.0,<1.1.0" [[package]] name = "protobuf" -version = "3.20.3" -description = "Protocol Buffers" +version = "4.25.1" +description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"}, - {file = "protobuf-3.20.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e"}, - {file = "protobuf-3.20.3-cp310-cp310-win32.whl", hash = "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c"}, - {file = "protobuf-3.20.3-cp310-cp310-win_amd64.whl", hash = "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7"}, - {file = "protobuf-3.20.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469"}, - {file = "protobuf-3.20.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4"}, - {file = "protobuf-3.20.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4"}, - {file = "protobuf-3.20.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454"}, - {file = "protobuf-3.20.3-cp37-cp37m-win32.whl", hash = "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905"}, - {file = "protobuf-3.20.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c"}, - {file = "protobuf-3.20.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7"}, - {file = "protobuf-3.20.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee"}, - {file = "protobuf-3.20.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050"}, - {file = "protobuf-3.20.3-cp38-cp38-win32.whl", hash = "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86"}, - {file = "protobuf-3.20.3-cp38-cp38-win_amd64.whl", hash = "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9"}, - {file = "protobuf-3.20.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b"}, - {file = "protobuf-3.20.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b"}, - {file = "protobuf-3.20.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402"}, - {file = "protobuf-3.20.3-cp39-cp39-win32.whl", hash = "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480"}, - {file = "protobuf-3.20.3-cp39-cp39-win_amd64.whl", hash = "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7"}, - {file = "protobuf-3.20.3-py2.py3-none-any.whl", hash = "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db"}, - {file = "protobuf-3.20.3.tar.gz", hash = "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2"}, + {file = "protobuf-4.25.1-cp310-abi3-win32.whl", hash = 
"sha256:193f50a6ab78a970c9b4f148e7c750cfde64f59815e86f686c22e26b4fe01ce7"}, + {file = "protobuf-4.25.1-cp310-abi3-win_amd64.whl", hash = "sha256:3497c1af9f2526962f09329fd61a36566305e6c72da2590ae0d7d1322818843b"}, + {file = "protobuf-4.25.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:0bf384e75b92c42830c0a679b0cd4d6e2b36ae0cf3dbb1e1dfdda48a244f4bcd"}, + {file = "protobuf-4.25.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:0f881b589ff449bf0b931a711926e9ddaad3b35089cc039ce1af50b21a4ae8cb"}, + {file = "protobuf-4.25.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:ca37bf6a6d0046272c152eea90d2e4ef34593aaa32e8873fc14c16440f22d4b7"}, + {file = "protobuf-4.25.1-cp38-cp38-win32.whl", hash = "sha256:abc0525ae2689a8000837729eef7883b9391cd6aa7950249dcf5a4ede230d5dd"}, + {file = "protobuf-4.25.1-cp38-cp38-win_amd64.whl", hash = "sha256:1484f9e692091450e7edf418c939e15bfc8fc68856e36ce399aed6889dae8bb0"}, + {file = "protobuf-4.25.1-cp39-cp39-win32.whl", hash = "sha256:8bdbeaddaac52d15c6dce38c71b03038ef7772b977847eb6d374fc86636fa510"}, + {file = "protobuf-4.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:becc576b7e6b553d22cbdf418686ee4daa443d7217999125c045ad56322dda10"}, + {file = "protobuf-4.25.1-py3-none-any.whl", hash = "sha256:a19731d5e83ae4737bb2a089605e636077ac001d18781b3cf489b9546c7c80d6"}, + {file = "protobuf-4.25.1.tar.gz", hash = "sha256:57d65074b4f5baa4ab5da1605c02be90ac20c8b40fb137d6a8df9f416b0d0ce2"}, ] [[package]] name = "psutil" -version = "5.9.6" +version = "5.9.7" description = "Cross-platform lib for process and system monitoring in Python." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fb8a697f11b0f5994550555fcfe3e69799e5b060c8ecf9e2f75c69302cc35c0d"}, - {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:91ecd2d9c00db9817a4b4192107cf6954addb5d9d67a969a4f436dbc9200f88c"}, - {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:10e8c17b4f898d64b121149afb136c53ea8b68c7531155147867b7b1ac9e7e28"}, - {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:18cd22c5db486f33998f37e2bb054cc62fd06646995285e02a51b1e08da97017"}, - {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ca2780f5e038379e520281e4c032dddd086906ddff9ef0d1b9dcf00710e5071c"}, - {file = "psutil-5.9.6-cp27-none-win32.whl", hash = "sha256:70cb3beb98bc3fd5ac9ac617a327af7e7f826373ee64c80efd4eb2856e5051e9"}, - {file = "psutil-5.9.6-cp27-none-win_amd64.whl", hash = "sha256:51dc3d54607c73148f63732c727856f5febec1c7c336f8f41fcbd6315cce76ac"}, - {file = "psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a"}, - {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92e0cc43c524834af53e9d3369245e6cc3b130e78e26100d1f63cdb0abeb3d3c"}, - {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4"}, - {file = "psutil-5.9.6-cp36-cp36m-win32.whl", hash = "sha256:3ebf2158c16cc69db777e3c7decb3c0f43a7af94a60d72e87b2823aebac3d602"}, - {file = "psutil-5.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:ff18b8d1a784b810df0b0fff3bcb50ab941c3b8e2c8de5726f9c71c601c611aa"}, - {file = "psutil-5.9.6-cp37-abi3-win32.whl", hash = 
"sha256:a6f01f03bf1843280f4ad16f4bde26b817847b4c1a0db59bf6419807bc5ce05c"}, - {file = "psutil-5.9.6-cp37-abi3-win_amd64.whl", hash = "sha256:6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a"}, - {file = "psutil-5.9.6-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:daecbcbd29b289aac14ece28eca6a3e60aa361754cf6da3dfb20d4d32b6c7f57"}, - {file = "psutil-5.9.6.tar.gz", hash = "sha256:e4b92ddcd7dd4cdd3f900180ea1e104932c7bce234fb88976e2a3b296441225a"}, + {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, + {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, + {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, + {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, + {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, + {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, + {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, + {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, + {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, + {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, + {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, + {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, ] [package.extras] @@ -3759,65 +3699,6 @@ files = [ {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, ] -[[package]] -name = "pyarrow" -version = "14.0.1" -description = "Python library for Apache Arrow" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyarrow-14.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:96d64e5ba7dceb519a955e5eeb5c9adcfd63f73a56aea4722e2cc81364fc567a"}, - {file = "pyarrow-14.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a8ae88c0038d1bc362a682320112ee6774f006134cd5afc291591ee4bc06505"}, - {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0f6f053cb66dc24091f5511e5920e45c83107f954a21032feadc7b9e3a8e7851"}, - {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:906b0dc25f2be12e95975722f1e60e162437023f490dbd80d0deb7375baf3171"}, - {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:78d4a77a46a7de9388b653af1c4ce539350726cd9af62e0831e4f2bd0c95a2f4"}, - {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06ca79080ef89d6529bb8e5074d4b4f6086143b2520494fcb7cf8a99079cde93"}, - {file = "pyarrow-14.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:32542164d905002c42dff896efdac79b3bdd7291b1b74aa292fac8450d0e4dcd"}, - {file = "pyarrow-14.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:c7331b4ed3401b7ee56f22c980608cf273f0380f77d0f73dd3c185f78f5a6220"}, - {file = "pyarrow-14.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:922e8b49b88da8633d6cac0e1b5a690311b6758d6f5d7c2be71acb0f1e14cd61"}, - {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58c889851ca33f992ea916b48b8540735055201b177cb0dcf0596a495a667b00"}, - {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30d8494870d9916bb53b2a4384948491444741cb9a38253c590e21f836b01222"}, - {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:be28e1a07f20391bb0b15ea03dcac3aade29fc773c5eb4bee2838e9b2cdde0cb"}, - {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:981670b4ce0110d8dcb3246410a4aabf5714db5d8ea63b15686bce1c914b1f83"}, - {file = "pyarrow-14.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:4756a2b373a28f6166c42711240643fb8bd6322467e9aacabd26b488fa41ec23"}, - {file = "pyarrow-14.0.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:cf87e2cec65dd5cf1aa4aba918d523ef56ef95597b545bbaad01e6433851aa10"}, - {file = "pyarrow-14.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:470ae0194fbfdfbf4a6b65b4f9e0f6e1fa0ea5b90c1ee6b65b38aecee53508c8"}, - {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6263cffd0c3721c1e348062997babdf0151301f7353010c9c9a8ed47448f82ab"}, - {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8089d7e77d1455d529dbd7cff08898bbb2666ee48bc4085203af1d826a33cc"}, - {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fada8396bc739d958d0b81d291cfd201126ed5e7913cb73de6bc606befc30226"}, - {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2a145dab9ed7849fc1101bf03bcdc69913547f10513fdf70fc3ab6c0a50c7eee"}, - {file = "pyarrow-14.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:05fe7994745b634c5fb16ce5717e39a1ac1fac3e2b0795232841660aa76647cd"}, - {file = "pyarrow-14.0.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:a8eeef015ae69d104c4c3117a6011e7e3ecd1abec79dc87fd2fac6e442f666ee"}, - {file = "pyarrow-14.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c76807540989fe8fcd02285dd15e4f2a3da0b09d27781abec3adc265ddbeba1"}, - {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:450e4605e3c20e558485f9161a79280a61c55efe585d51513c014de9ae8d393f"}, - {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:323cbe60210173ffd7db78bfd50b80bdd792c4c9daca8843ef3cd70b186649db"}, - {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = 
"sha256:0140c7e2b740e08c5a459439d87acd26b747fc408bde0a8806096ee0baaa0c15"}, - {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:e592e482edd9f1ab32f18cd6a716c45b2c0f2403dc2af782f4e9674952e6dd27"}, - {file = "pyarrow-14.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d264ad13605b61959f2ae7c1d25b1a5b8505b112715c961418c8396433f213ad"}, - {file = "pyarrow-14.0.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:01e44de9749cddc486169cb632f3c99962318e9dacac7778315a110f4bf8a450"}, - {file = "pyarrow-14.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0351fecf0e26e152542bc164c22ea2a8e8c682726fce160ce4d459ea802d69c"}, - {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c1f6110c386464fd2e5e4ea3624466055bbe681ff185fd6c9daa98f30a3f9a"}, - {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11e045dfa09855b6d3e7705a37c42e2dc2c71d608fab34d3c23df2e02df9aec3"}, - {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:097828b55321897db0e1dbfc606e3ff8101ae5725673498cbfa7754ee0da80e4"}, - {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1daab52050a1c48506c029e6fa0944a7b2436334d7e44221c16f6f1b2cc9c510"}, - {file = "pyarrow-14.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3f6d5faf4f1b0d5a7f97be987cf9e9f8cd39902611e818fe134588ee99bf0283"}, - {file = "pyarrow-14.0.1.tar.gz", hash = "sha256:b8b3f4fe8d4ec15e1ef9b599b94683c5216adaed78d5cb4c606180546d1e2ee1"}, -] - -[package.dependencies] -numpy = ">=1.16.6" - -[[package]] -name = "pyarrow-hotfix" -version = "0.6" -description = "" -optional = false -python-versions = ">=3.5" -files = [ - {file = "pyarrow_hotfix-0.6-py3-none-any.whl", hash = "sha256:dcc9ae2d220dff0083be6a9aa8e0cdee5182ad358d4931fce825c545e5c89178"}, - {file = "pyarrow_hotfix-0.6.tar.gz", hash = "sha256:79d3e030f7ff890d408a100ac16d6f00b14d44a502d7897cd9fc3e3a534e9945"}, -] - [[package]] name = "pyasn1" version = "0.5.1" @@ -3875,18 +3756,18 @@ files = [ [[package]] name = "pydantic" -version = "2.5.2" +version = "2.5.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, - {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, + {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, + {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.5" +pydantic-core = "2.14.6" typing-extensions = ">=4.6.1" [package.extras] @@ -3894,116 +3775,116 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.5" +version = "2.14.6" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, - {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, - {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, - {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, - {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, - {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, - {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, - {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, - {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, - {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, - {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, - {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, - {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = 
"sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, - {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, - {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, - {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, - {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, - {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, - {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, - {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, - {file = 
"pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, - {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, - {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, - {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, - {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, - {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, - {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, - {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, - {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, - {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, - {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, - {file = 
"pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, - {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, - {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, - {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, - {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, - {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, + {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, + {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, + {file = 
"pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, + {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, + {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, + {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, + {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, + {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, + {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, + {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, + {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, + {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, + {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, + {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, + {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"}, + {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"}, + {file = 
"pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"}, + {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"}, + {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"}, + {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"}, + {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"}, + {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"}, + {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"}, + {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"}, + {file = 
"pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"}, + {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"}, + {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"}, + {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"}, + {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"}, + {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"}, + {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, + {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, + {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, + {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, + 
{file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, + {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash 
= "sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, + {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, ] [package.dependencies] @@ -4026,42 +3907,42 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pymupdf" -version = "1.23.7" +version = "1.23.8" description = "A high performance Python library for data extraction, analysis, conversion & manipulation of PDF (and other) documents." 
optional = false python-versions = ">=3.8" files = [ - {file = "PyMuPDF-1.23.7-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:74982a3e0186f5525c2f090863f75b593994fd7e0e10c4f2605159b800f3ca0b"}, - {file = "PyMuPDF-1.23.7-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:c996b11e015027638296d5923d53559de0493f146b3ca0bab76b3ee0db0bc6eb"}, - {file = "PyMuPDF-1.23.7-cp310-none-manylinux2014_aarch64.whl", hash = "sha256:3a6c0cfe9686edfe96e885bdec995588b6cdf78e69b1588a5d61e60756cfe824"}, - {file = "PyMuPDF-1.23.7-cp310-none-manylinux2014_x86_64.whl", hash = "sha256:a164a71e5d02bb31f136e5afbf4048c47c93125f8fac0eedf6a868bc5f40c675"}, - {file = "PyMuPDF-1.23.7-cp310-none-win32.whl", hash = "sha256:a5eaf107d23c4b1281cfbe189dae634d2e749c20ca3d3bf3d162cf2357c5024b"}, - {file = "PyMuPDF-1.23.7-cp310-none-win_amd64.whl", hash = "sha256:bdb2423bccb07218a42b1dcdfeb91a96ce001e872263fb545132000cd087bda0"}, - {file = "PyMuPDF-1.23.7-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:ea9b4ed48509faa6b6f8834401c586c3780f4dcd28cdc3013a3d12bcb2153aa0"}, - {file = "PyMuPDF-1.23.7-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:c27f5916623704da30608ac54c7880fe44b8f9f7a9c9fc6332e216599c536db9"}, - {file = "PyMuPDF-1.23.7-cp311-none-manylinux2014_aarch64.whl", hash = "sha256:ce5f0d37a5086d7601c775919963f1677342c644d7ad00e92f6b56b8ec48b667"}, - {file = "PyMuPDF-1.23.7-cp311-none-manylinux2014_x86_64.whl", hash = "sha256:dfca3952b285747f9d84b57f97902e2a33fd80f0556557c55b2da358da38e48c"}, - {file = "PyMuPDF-1.23.7-cp311-none-win32.whl", hash = "sha256:bb302a798332260870cc6540bab28530b2ecd57447b3ce464da1b501dc1813b8"}, - {file = "PyMuPDF-1.23.7-cp311-none-win_amd64.whl", hash = "sha256:8afbfe6c771cec7f28cdf8f460b92d973d233a42712a87e24cee225d88aaf1f5"}, - {file = "PyMuPDF-1.23.7-cp312-none-macosx_10_9_x86_64.whl", hash = "sha256:cc18057a83e06871e6242ad39bcede65c53aa8d135d267edb05711ffee9e669a"}, - {file = "PyMuPDF-1.23.7-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:ad743d866749a399ef61086b2b6985d3212bd985fd972d55a288e9b53a73dd98"}, - {file = "PyMuPDF-1.23.7-cp312-none-manylinux2014_aarch64.whl", hash = "sha256:a4e145914f929338dd9648f03b8cf9a8baba86c00410e5874dce8282fbd6b6ed"}, - {file = "PyMuPDF-1.23.7-cp312-none-manylinux2014_x86_64.whl", hash = "sha256:e4c1c3c75297d986da7266c6c39aee7b30783445468f58cb1b9659872f905cd8"}, - {file = "PyMuPDF-1.23.7-cp312-none-win32.whl", hash = "sha256:7abb49faee62ddacb8b6dc4bbab3e9a3cb35d8782f2c461b42d178ff4af63da2"}, - {file = "PyMuPDF-1.23.7-cp312-none-win_amd64.whl", hash = "sha256:59fe0f0c1d2e8d9ab678cf4c937e64bcaf551602ee7d8c80dc489c92ddb3cfe2"}, - {file = "PyMuPDF-1.23.7-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:b487d49fc79a45e005cd06840f9c5f348b1aa85329d9e35c4eb924d7ae19c9b2"}, - {file = "PyMuPDF-1.23.7-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:c13cbb6bd7549814877cc5e4b0063090b9e4029063dd90e68b43541205508fe2"}, - {file = "PyMuPDF-1.23.7-cp38-none-manylinux2014_aarch64.whl", hash = "sha256:4fa6028040028be35bedadc18c16892e4d298319f8c7f071d5305b0ab84a0121"}, - {file = "PyMuPDF-1.23.7-cp38-none-manylinux2014_x86_64.whl", hash = "sha256:d7ba9293c5c828cc1c10bee07f375ec3d032950cf0dba3514a4a93bae347d83b"}, - {file = "PyMuPDF-1.23.7-cp38-none-win32.whl", hash = "sha256:d53c1d06989b32e5fce62d55dee59c6e534d5ed289fee37f5af3e0b009b63677"}, - {file = "PyMuPDF-1.23.7-cp38-none-win_amd64.whl", hash = "sha256:43f00c7713124e36db2feca737ad9228d283d5b2ca3e01643b40af636a095cc9"}, - {file = "PyMuPDF-1.23.7-cp39-none-macosx_10_9_x86_64.whl", hash = 
"sha256:130ae62ba55ced20eb35088968fd158651b66a510b60b25fcd8d62b58633dd02"}, - {file = "PyMuPDF-1.23.7-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:835d9f922c3a6612cd202aaa6387ef83741f6ce1bb1c50b814298b27072fea69"}, - {file = "PyMuPDF-1.23.7-cp39-none-manylinux2014_aarch64.whl", hash = "sha256:c82093000ae12b5c6e9334b272da37f280968e33b4fcd122169af6f9abb71b0e"}, - {file = "PyMuPDF-1.23.7-cp39-none-manylinux2014_x86_64.whl", hash = "sha256:9b4024876ea72d1d3172c6adfc5cf69eb397ed8f773f36d0b7ed9fb88b134ace"}, - {file = "PyMuPDF-1.23.7-cp39-none-win32.whl", hash = "sha256:96284e9d5a28ed3125355d129fe6a20c2223da861bc8527188e55608f06cbdf0"}, - {file = "PyMuPDF-1.23.7-cp39-none-win_amd64.whl", hash = "sha256:365f772d7e32ff1f7bb3ee4cb502d71d5919566b61c3d9c350d1a61c5c5b3073"}, - {file = "PyMuPDF-1.23.7.tar.gz", hash = "sha256:53b7c03a2f179943fadcb723440ef5832b5f60aa39fc1505ff37cafa209c63ea"}, + {file = "PyMuPDF-1.23.8-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:34dbdddd71ccb494a8e729580acf895febcbfd6681d6f85403e8ead665a01016"}, + {file = "PyMuPDF-1.23.8-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:74f1d35a6b2cdbb45bb3e8d14336a4afc227e7339ce1b632aa29ace49313bfe6"}, + {file = "PyMuPDF-1.23.8-cp310-none-manylinux2014_aarch64.whl", hash = "sha256:d4dd5dd54abb2c413812cbd1469244e18f32f89990a8341098337e617eca875a"}, + {file = "PyMuPDF-1.23.8-cp310-none-manylinux2014_x86_64.whl", hash = "sha256:03985273a69bb980ae5640ac8e1e193b53a61a175bb446ee7fabc78fd9409a71"}, + {file = "PyMuPDF-1.23.8-cp310-none-win32.whl", hash = "sha256:099ec6b82f7082731c966f9d2874d5638884e864e31d4b50b1ad3b0954497399"}, + {file = "PyMuPDF-1.23.8-cp310-none-win_amd64.whl", hash = "sha256:a3b54705c152f60c7b8abea40253731caa7aebc5c10e5547e8d12f93546c5b1e"}, + {file = "PyMuPDF-1.23.8-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:4f62b2940d88ffcc706c1a5d21efa24a01b65d1c87f0d4669d03b136c984098b"}, + {file = "PyMuPDF-1.23.8-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:58ab6e7121550767ff4e595800317c9acc8d5c1a3ddaf9116f257bb8159af501"}, + {file = "PyMuPDF-1.23.8-cp311-none-manylinux2014_aarch64.whl", hash = "sha256:4a8f8322daabcbae02fe31cfa270000ebb31a327c766cd7423fff38ab55b3b4e"}, + {file = "PyMuPDF-1.23.8-cp311-none-manylinux2014_x86_64.whl", hash = "sha256:ed917f7b66c332e5fb6bcda2dcb71b6eddeca24e4d0ea7984e0cb3628fbee894"}, + {file = "PyMuPDF-1.23.8-cp311-none-win32.whl", hash = "sha256:dec10e23b2dd813fe75d60db0af38b4b640ad6066cb57afe3536273d8740d15e"}, + {file = "PyMuPDF-1.23.8-cp311-none-win_amd64.whl", hash = "sha256:9d272e46cd08e65c5811ad9be84bf4fd5f559e538eae87694d5a4685585c633e"}, + {file = "PyMuPDF-1.23.8-cp312-none-macosx_10_9_x86_64.whl", hash = "sha256:e083fbd3a6c1292ddd564cf7187cf0a333ef79c73afb31532e0b26129df3d3b4"}, + {file = "PyMuPDF-1.23.8-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:fde13b2e5233a77e2b27e80e83d4f8ae3532e77f4870233e62d09b2c0349389c"}, + {file = "PyMuPDF-1.23.8-cp312-none-manylinux2014_aarch64.whl", hash = "sha256:6198b94a276faa370800fbe0e32cea5ca632e50310011d3e60e398e53a9f4ebf"}, + {file = "PyMuPDF-1.23.8-cp312-none-manylinux2014_x86_64.whl", hash = "sha256:157518a1f595ff469423f3e867a53468137a43d97041d624d72aab44eea28c67"}, + {file = "PyMuPDF-1.23.8-cp312-none-win32.whl", hash = "sha256:8e6dcb03473058022354de687a6264309b27582e140eea0688bc96529c27228b"}, + {file = "PyMuPDF-1.23.8-cp312-none-win_amd64.whl", hash = "sha256:07947f0e1e7439ceb244009ec27c23a6cf44f5ac6c39c23259ea64f54af37acc"}, + {file = "PyMuPDF-1.23.8-cp38-none-macosx_10_9_x86_64.whl", hash = 
"sha256:d6cda66e13d2aaf2db081db63be852379b27636e46a8e0384983696ac4719de8"}, + {file = "PyMuPDF-1.23.8-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:238aff47b54cb36b0b0ad2f3dedf19b17a457064c78fc239a4529cc61f5fdbf3"}, + {file = "PyMuPDF-1.23.8-cp38-none-manylinux2014_aarch64.whl", hash = "sha256:551d0fbe64154db71aabf3c4c29d815db2d8671c1600f91a96240e2cfa8284a3"}, + {file = "PyMuPDF-1.23.8-cp38-none-manylinux2014_x86_64.whl", hash = "sha256:c3b7fabd4ffad84a25c1daf2074deae1c129ce77a390a2d37598ecbc6f2b0bc8"}, + {file = "PyMuPDF-1.23.8-cp38-none-win32.whl", hash = "sha256:2b20ec14018ca81243d4386da538d208c8969cb441dabed5fd2a5bc52863e18c"}, + {file = "PyMuPDF-1.23.8-cp38-none-win_amd64.whl", hash = "sha256:809eb5633bb3851a535a66a96212123289a6adf54b5cd187d50233a056740afd"}, + {file = "PyMuPDF-1.23.8-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:129369a2981725841824d8c2369800b0cfb4e88b57d58ef512c3bbeeb43968c4"}, + {file = "PyMuPDF-1.23.8-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:8e745754a9ffcd4475cd077c6423b02c77f5c98dd654c613511def033608c430"}, + {file = "PyMuPDF-1.23.8-cp39-none-manylinux2014_aarch64.whl", hash = "sha256:2b0e631e8d0549bca000ca10ff51688fd7aca5387a54e62c2c0ad5336385b152"}, + {file = "PyMuPDF-1.23.8-cp39-none-manylinux2014_x86_64.whl", hash = "sha256:b6fa6c229e0dd83b8edf9a36952c41508ee6736dfa9ab706e3c9f5fb0953b214"}, + {file = "PyMuPDF-1.23.8-cp39-none-win32.whl", hash = "sha256:e96badb750f9952978615d0c61297f5bb7af718c9c318a09d70b8ba6e03c8cd8"}, + {file = "PyMuPDF-1.23.8-cp39-none-win_amd64.whl", hash = "sha256:4cca014862818330acdb4aa14ce7a792cb9e8cf3e81446340664c1af87dcb57c"}, + {file = "PyMuPDF-1.23.8.tar.gz", hash = "sha256:d8d60fded2a9b72b3535940bbee2066e4927cfaf66e1179f1bb06a8fdda6d4af"}, ] [package.dependencies] @@ -4098,13 +3979,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pypdf" -version = "3.17.2" +version = "3.17.4" description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" optional = false python-versions = ">=3.6" files = [ - {file = "pypdf-3.17.2-py3-none-any.whl", hash = "sha256:e149ed50aa41e04b176246714806cd8d6c6c6d68b528508f849642959041963a"}, - {file = "pypdf-3.17.2.tar.gz", hash = "sha256:d6f077060912f8292d7db3da04f7bf2428ac974781e11eef219193a22120f649"}, + {file = "pypdf-3.17.4-py3-none-any.whl", hash = "sha256:6aa0f61b33779b64486de3f42835d3668badd48dac4a536aeb87da187a5eacd2"}, + {file = "pypdf-3.17.4.tar.gz", hash = "sha256:ec96e2e4fc9648ac609d19c00d41e9d606e0ae2ce5a0bbe7691426f5f157166a"}, ] [package.extras] @@ -4147,16 +4028,6 @@ files = [ {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, ] -[[package]] -name = "pysbd" -version = "0.3.4" -description = "pysbd (Python Sentence Boundary Disambiguation) is a rule-based sentence boundary detection that works out-of-the-box across many languages." 
-optional = false -python-versions = ">=3" -files = [ - {file = "pysbd-0.3.4-py3-none-any.whl", hash = "sha256:cd838939b7b0b185fcf86b0baf6636667dfb6e474743beeff878e9f42e022953"}, -] - [[package]] name = "pytesseract" version = "0.3.10" @@ -4172,48 +4043,6 @@ files = [ packaging = ">=21.3" Pillow = ">=8.0.0" -[[package]] -name = "pytest" -version = "7.4.3" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-xdist" -version = "3.5.0" -description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-xdist-3.5.0.tar.gz", hash = "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a"}, - {file = "pytest_xdist-3.5.0-py3-none-any.whl", hash = "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24"}, -] - -[package.dependencies] -execnet = ">=1.1" -pytest = ">=6.2.0" - -[package.extras] -psutil = ["psutil (>=3.0)"] -setproctitle = ["setproctitle"] -testing = ["filelock"] - [[package]] name = "python-dateutil" version = "2.8.2" @@ -4313,17 +4142,6 @@ files = [ {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] -[[package]] -name = "pytzdata" -version = "2020.1" -description = "The Olson timezone database for Python." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, - {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, -] - [[package]] name = "pywin32" version = "306" @@ -4396,126 +4214,103 @@ files = [ {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] -[[package]] -name = "ragas" -version = "0.0.22" -description = "" -optional = false -python-versions = "*" -files = [ - {file = "ragas-0.0.22-py3-none-any.whl", hash = "sha256:528d7f8c539d1499885fe4316bf791f9d3e2ae3d72e597af3ffba83adfc84625"}, - {file = "ragas-0.0.22.tar.gz", hash = "sha256:925c03729fa6a3826b970ad53772d6726ecd1046b13a44f4fe97524a4341495c"}, -] - -[package.dependencies] -datasets = "*" -langchain = "*" -nest-asyncio = "*" -numpy = "*" -openai = ">1" -pysbd = ">=0.3.4" -tiktoken = "*" - -[package.extras] -all = ["sentence-transformers"] - [[package]] name = "rapidfuzz" -version = "3.5.2" +version = "3.6.0" description = "rapid fuzzy string matching" optional = false python-versions = ">=3.8" files = [ - {file = "rapidfuzz-3.5.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1a047d6e58833919d742bbc0dfa66d1de4f79e8562ee195007d3eae96635df39"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22877c027c492b7dc7e3387a576a33ed5aad891104aa90da2e0844c83c5493ef"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e0f448b0eacbcc416feb634e1232a48d1cbde5e60f269c84e4fb0912f7bbb001"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d05146497672f869baf41147d5ec1222788c70e5b8b0cfcd6e95597c75b5b96b"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f2df3968738a38d2a0058b5e721753f5d3d602346a1027b0dde31b0476418f3"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5afc1fcf1830f9bb87d3b490ba03691081b9948a794ea851befd2643069a30c1"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84be69ea65f64fa01e5c4976be9826a5aa949f037508887add42da07420d65d6"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8658c1045766e87e0038323aa38b4a9f49b7f366563271f973c8890a98aa24b5"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:852b3f93c15fce58b8dc668bd54123713bfdbbb0796ba905ea5df99cfd083132"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:12424a06ad9bd0cbf5f7cea1015e78d924a0034a0e75a5a7b39c0703dcd94095"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b4e9ded8e80530bd7205a7a2b01802f934a4695ca9e9fbe1ce9644f5e0697864"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:affb8fe36157c2dc8a7bc45b6a1875eb03e2c49167a1d52789144bdcb7ab3b8c"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1d33a622572d384f4c90b5f7a139328246ab5600141e90032b521c2127bd605"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-win32.whl", hash = "sha256:2cf9f2ed4a97b388cffd48d534452a564c2491f68f4fd5bc140306f774ceb63a"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:6541ffb70097885f7302cd73e2efd77be99841103023c2f9408551f27f45f7a5"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-win_arm64.whl", hash = "sha256:1dd2542e5103fb8ca46500a979ae14d1609dcba11d2f9fe01e99eec03420e193"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bff7d3127ebc5cd908f3a72f6517f31f5247b84666137556a8fcc5177c560939"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fdfdb3685b631d8efbb6d6d3d86eb631be2b408d9adafcadc11e63e3f9c96dec"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97b043fe8185ec53bb3ff0e59deb89425c0fc6ece6e118939963aab473505801"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a4a7832737f87583f3863dc62e6f56dd4a9fefc5f04a7bdcb4c433a0f36bb1b"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d876dba9a11fcf60dcf1562c5a84ef559db14c2ceb41e1ad2d93cd1dc085889"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa4c0612893716bbb6595066ca9ecb517c982355abe39ba9d1f4ab834ace91ad"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:120316824333e376b88b284724cfd394c6ccfcb9818519eab5d58a502e5533f0"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cdbe8e80cc186d55f748a34393533a052d855357d5398a1ccb71a5021b58e8d"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1062425c8358a547ae5ebad148f2e0f02417716a571b803b0c68e4d552e99d32"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:66be181965aff13301dd5f9b94b646ce39d99c7fe2fd5de1656f4ca7fafcb38c"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:53df7aea3cf301633cfa2b4b2c2d2441a87dfc878ef810e5b4eddcd3e68723ad"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:76639dca5eb0afc6424ac5f42d43d3bd342ac710e06f38a8c877d5b96de09589"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:27689361c747b5f7b8a26056bc60979875323f1c3dcaaa9e2fec88f03b20a365"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-win32.whl", hash = "sha256:99c9fc5265566fb94731dc6826f43c5109e797078264e6389a36d47814473692"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:666928ee735562a909d81bd2f63207b3214afd4ca41f790ab3025d066975c814"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-win_arm64.whl", hash = "sha256:d55de67c48f06b7772541e8d4c062a2679205799ce904236e2836cb04c106442"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:04e1e02b182283c43c866e215317735e91d22f5d34e65400121c04d5ed7ed859"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:365e544aba3ac13acf1a62cb2e5909ad2ba078d0bfc7d69b1f801dfd673b9782"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b61f77d834f94b0099fa9ed35c189b7829759d4e9c2743697a130dd7ba62259f"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43fb368998b9703fa8c63db292a8ab9e988bf6da0c8a635754be8e69da1e7c1d"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25510b5d142c47786dbd27cfd9da7cae5bdea28d458379377a3644d8460a3404"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:bf3093443751e5a419834162af358d1e31dec75f84747a91dbbc47b2c04fc085"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2fbaf546f15a924613f89d609ff66b85b4f4c2307ac14d93b80fe1025b713138"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32d580df0e130ed85400ff77e1c32d965e9bc7be29ac4072ab637f57e26d29fb"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:358a0fbc49343de20fee8ebdb33c7fa8f55a9ff93ff42d1ffe097d2caa248f1b"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fb379ac0ddfc86c5542a225d194f76ed468b071b6f79ff57c4b72e635605ad7d"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7fb21e182dc6d83617e88dea002963d5cf99cf5eabbdbf04094f503d8fe8d723"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c04f9f1310ce414ab00bdcbf26d0906755094bfc59402cb66a7722c6f06d70b2"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f6da61cc38c1a95efc5edcedf258759e6dbab73191651a28c5719587f32a56ad"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-win32.whl", hash = "sha256:f823fd1977071486739f484e27092765d693da6beedaceece54edce1dfeec9b2"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:a8162d81486de85ab1606e48e076431b66d44cf431b2b678e9cae458832e7147"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-win_arm64.whl", hash = "sha256:dfc63fabb7d8da8483ca836bae7e55766fe39c63253571e103c034ba8ea80950"}, - {file = "rapidfuzz-3.5.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:df8fae2515a1e4936affccac3e7d506dd904de5ff82bc0b1433b4574a51b9bfb"}, - {file = "rapidfuzz-3.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dd6384780c2a16097d47588844cd677316a90e0f41ef96ff485b62d58de79dcf"}, - {file = "rapidfuzz-3.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:467a4d730ae3bade87dba6bd769e837ab97e176968ce20591fe8f7bf819115b1"}, - {file = "rapidfuzz-3.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54576669c1502b751b534bd76a4aeaaf838ed88b30af5d5c1b7d0a3ca5d4f7b5"}, - {file = "rapidfuzz-3.5.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abafeb82f85a651a9d6d642a33dc021606bc459c33e250925b25d6b9e7105a2e"}, - {file = "rapidfuzz-3.5.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73e14617a520c0f1bc15eb78c215383477e5ca70922ecaff1d29c63c060e04ca"}, - {file = "rapidfuzz-3.5.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7cdf92116e9dfe40da17f921cdbfa0039dde9eb158914fa5f01b1e67a20b19cb"}, - {file = "rapidfuzz-3.5.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1962d5ccf8602589dbf8e85246a0ee2b4050d82fade1568fb76f8a4419257704"}, - {file = "rapidfuzz-3.5.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:db45028eae2fda7a24759c69ebeb2a7fbcc1a326606556448ed43ee480237a3c"}, - {file = "rapidfuzz-3.5.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b685abb8b6d97989f6c69556d7934e0e533aa8822f50b9517ff2da06a1d29f23"}, - {file = "rapidfuzz-3.5.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:40139552961018216b8cd88f6df4ecbbe984f907a62a5c823ccd907132c29a14"}, - {file = "rapidfuzz-3.5.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0fef4705459842ef8f79746d6f6a0b5d2b6a61a145d7d8bbe10b2e756ea337c8"}, - {file = "rapidfuzz-3.5.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:6b2ad5516f7068c7d9cbcda8ac5906c589e99bc427df2e1050282ee2d8bc2d58"}, - {file = "rapidfuzz-3.5.2-cp38-cp38-win32.whl", hash = "sha256:2da3a24c2f7dfca7f26ba04966b848e3bbeb93e54d899908ff88dfe3e1def9dc"}, - {file = "rapidfuzz-3.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:e3f2be79d4114d01f383096dbee51b57df141cb8b209c19d0cf65f23a24e75ba"}, - {file = "rapidfuzz-3.5.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:089a7e96e5032821af5964d8457fcb38877cc321cdd06ad7c5d6e3d852264cb9"}, - {file = "rapidfuzz-3.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75d8a52bf8d1aa2ac968ae4b21b83b94fc7e5ea3dfbab34811fc60f32df505b2"}, - {file = "rapidfuzz-3.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2bacce6bbc0362f0789253424269cc742b1f45e982430387db3abe1d0496e371"}, - {file = "rapidfuzz-3.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5fd627e604ddc02db2ddb9ddc4a91dd92b7a6d6378fcf30bb37b49229072b89"}, - {file = "rapidfuzz-3.5.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2e8b369f23f00678f6e673572209a5d3b0832f4991888e3df97af7b8b9decf3"}, - {file = "rapidfuzz-3.5.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c29958265e4c2b937269e804b8a160c027ee1c2627d6152655008a8b8083630e"}, - {file = "rapidfuzz-3.5.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00be97f9219355945c46f37ac9fa447046e6f7930f7c901e5d881120d1695458"}, - {file = "rapidfuzz-3.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada0d8d57e0f556ef38c24fee71bfe8d0db29c678bff2acd1819fc1b74f331c2"}, - {file = "rapidfuzz-3.5.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:de89585268ed8ee44e80126814cae63ff6b00d08416481f31b784570ef07ec59"}, - {file = "rapidfuzz-3.5.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:908ff2de9c442b379143d1da3c886c63119d4eba22986806e2533cee603fe64b"}, - {file = "rapidfuzz-3.5.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:54f0061028723c026020f5bb20649c22bc8a0d9f5363c283bdc5901d4d3bff01"}, - {file = "rapidfuzz-3.5.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:b581107ec0c610cdea48b25f52030770be390db4a9a73ca58b8d70fa8a5ec32e"}, - {file = "rapidfuzz-3.5.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1d5a686ea258931aaa38019204bdc670bbe14b389a230b1363d84d6cf4b9dc38"}, - {file = "rapidfuzz-3.5.2-cp39-cp39-win32.whl", hash = "sha256:97f811ca7709c6ee8c0b55830f63b3d87086f4abbcbb189b4067e1cd7014db7b"}, - {file = "rapidfuzz-3.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:58ee34350f8c292dd24a050186c0e18301d80da904ef572cf5fda7be6a954929"}, - {file = "rapidfuzz-3.5.2-cp39-cp39-win_arm64.whl", hash = "sha256:c5075ce7b9286624cafcf36720ef1cfb2946d75430b87cb4d1f006e82cd71244"}, - {file = "rapidfuzz-3.5.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af5221e4f7800db3e84c46b79dba4112e3b3cc2678f808bdff4fcd2487073846"}, - {file = "rapidfuzz-3.5.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8501d7875b176930e6ed9dbc1bc35adb37ef312f6106bd6bb5c204adb90160ac"}, - {file = "rapidfuzz-3.5.2-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e414e1ca40386deda4291aa2d45062fea0fbaa14f95015738f8bb75c4d27f862"}, - {file = "rapidfuzz-3.5.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2059cd73b7ea779a9307d7a78ed743f0e3d33b88ccdcd84569abd2953cd859f"}, - {file = "rapidfuzz-3.5.2-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:58e3e21f6f13a7cca265cce492bc797425bd4cb2025fdd161a9e86a824ad65ce"}, - {file = "rapidfuzz-3.5.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b847a49377e64e92e11ef3d0a793de75451526c83af015bdafdd5d04de8a058a"}, - {file = "rapidfuzz-3.5.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a42c7a8c62b29c4810e39da22b42524295fcb793f41c395c2cb07c126b729e83"}, - {file = "rapidfuzz-3.5.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51b5166be86e09e011e92d9862b1fe64c4c7b9385f443fb535024e646d890460"}, - {file = "rapidfuzz-3.5.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f808dcb0088a7a496cc9895e66a7b8de55ffea0eb9b547c75dfb216dd5f76ed"}, - {file = "rapidfuzz-3.5.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d4b05a8f4ab7e7344459394094587b033fe259eea3a8720035e8ba30e79ab39b"}, - {file = "rapidfuzz-3.5.2.tar.gz", hash = "sha256:9e9b395743e12c36a3167a3a9fd1b4e11d92fb0aa21ec98017ee6df639ed385e"}, + {file = "rapidfuzz-3.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b21e65818a7e4846bacfa1bd77bc337b02107cc88a7b262dbaeb7944e0c8958d"}, + {file = "rapidfuzz-3.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3bf1f610954ec936d87d58eb0247af61e35c41a92c30f3cfe0478baf764558bf"}, + {file = "rapidfuzz-3.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d7a4b458096be3b39bf5778e26ac96ac10399b8e4fd40a03fd55a155c093acf5"}, + {file = "rapidfuzz-3.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a09cdfd9c1a11a91e207df138aa2e20a9267cf5f7cde6e9a53a4551454b06333"}, + {file = "rapidfuzz-3.6.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:514b15338b7f59b80bbe014d1ffc0093d17abf96f82eb20d8bb573ce24d84a12"}, + {file = "rapidfuzz-3.6.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4ba08ba184e530a770b0fc8897f4ce77ae0863039e139ef3180502b37586fec"}, + {file = "rapidfuzz-3.6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:195cc2a216223ff6118a1eb6cddb077cd264bc828ba7064ebb6e3bc61dd9d864"}, + {file = "rapidfuzz-3.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a743db763cba0aad39e8c8c6a6d1210247cb468f514891632211adad3935a29"}, + {file = "rapidfuzz-3.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2a317f1a7c318957bd5ca54bf8258263936320c49857fac69c5ed0b10c81ef0d"}, + {file = "rapidfuzz-3.6.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:96d46e9664c53b07f446ceb0f09973a46766e0fd4a26904e75f067ed78f07db2"}, + {file = "rapidfuzz-3.6.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:12eaf1aed5b0ded8c4b638a892b4fda53bf6f9f2d8597d8507ba6d697a34170a"}, + {file = "rapidfuzz-3.6.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:b9b7baab3245df24447c5dbd6d6ca4ce400e3bb088ffe6c994407c16852157b6"}, + {file = "rapidfuzz-3.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7927fff9dd2a34522704c665f9846e0f13e1824f014af8f14294858921bed731"}, + {file = "rapidfuzz-3.6.0-cp310-cp310-win32.whl", hash = "sha256:9557792003addc7e141e63fd60edc8d77bbd983c212f0f1683bf542cb0d396d9"}, + {file = "rapidfuzz-3.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:4c1de6260975d5589374abca04a76c3e968907ccdc6daf5f9dd2b4cca1a7a64d"}, + {file = "rapidfuzz-3.6.0-cp310-cp310-win_arm64.whl", hash = "sha256:ac328c83d49811b36e2699d5132193c5a82139e1a3d0b340babc1bce7428754e"}, + {file = "rapidfuzz-3.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:8b6205e8d730e0ad3e0472b4fde8f93c740c2165a198ab0ad457e35371b28e08"}, + {file = "rapidfuzz-3.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7c1eccce1125676537322d7bc14ecb917b6464700ea222703e131ccb9d165658"}, + {file = "rapidfuzz-3.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68dd5912a1b0cc145a674aa45513d595fd6691b263f860d00ac71388ebde09bc"}, + {file = "rapidfuzz-3.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc6604d6f1be13d9fb6b01442805ae2ec3bcccc54247ecabba8d3712aff9685a"}, + {file = "rapidfuzz-3.6.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ed25d6d7b184725bd26ecfe64dfe6a17d261705435e7c38415499b1316120a9"}, + {file = "rapidfuzz-3.6.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:187751b2b5c4e5b53ea4c30bcbdf6f545dbd985808c0750f990152e95357638f"}, + {file = "rapidfuzz-3.6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e96db4dfd4bf853898438dbf821c5017229aa8064b4b9a12a4bc7ff3112aa1e"}, + {file = "rapidfuzz-3.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25d98d242e7626c577ab702902040afb87e257ee93c9575884f82e6e7b4aaec0"}, + {file = "rapidfuzz-3.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b54cd11ee89b606252c90c5eb711eb6735e2b63305cc8c2e70479166017733a3"}, + {file = "rapidfuzz-3.6.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5d7e82a29af3c8052f78c9b2d98a0586ebd6bf41f27298b92b80293c6506e1d7"}, + {file = "rapidfuzz-3.6.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a0271e194a5c811db02c7350a119cabde6757587312c70247f6e50984ce36144"}, + {file = "rapidfuzz-3.6.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2a31a6f6442219bb494a4d5293abb106f95adfdbad3fef597377f7344883afa9"}, + {file = "rapidfuzz-3.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:48314743e61394e6418cec38a5389dd3ad6c1b33fc15d525898a303450f958e7"}, + {file = "rapidfuzz-3.6.0-cp311-cp311-win32.whl", hash = "sha256:fcf2ab337b7d331c6cbe7a5296b9f703666e7c1b00771a1dbac6e2d62e46b9a4"}, + {file = "rapidfuzz-3.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:a3aec96905badee77915f6cd791019aa06376b252ca4da3676339c4f8dd64e8f"}, + {file = "rapidfuzz-3.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:4d67f9f180faf341bc04be4f633707b773844a9c07f21dd2eabc27ea54591c8e"}, + {file = "rapidfuzz-3.6.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:583f9cca4e53b5ff93f86a9cf5ca15a6fed3094a2b49acaa3851b4eb00ea04f9"}, + {file = "rapidfuzz-3.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90fb0e1f931031a0fa967d77d8be21220d5b8e626546d3e774dc28c5a0aea10d"}, + {file = "rapidfuzz-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62be08cdd370cc491b594b80493850cf83aafec938b3ca2c98fc4d45667baac8"}, + {file = "rapidfuzz-3.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ea93d044eaf0d0204a088dbaab18ce2cda1bb0738062c8d2834a8b3832f496c"}, + {file = "rapidfuzz-3.6.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a99dc8dc8c2d6da1f5b15c9364bcad262f23da098e7bbd54056bee087423d74d"}, + {file = "rapidfuzz-3.6.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:88813871a373dc600a8ac831b6900ff57c9ed72e61128e065190e30c502b2e7a"}, + {file = "rapidfuzz-3.6.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c9c495ebe81a2996cb737060181703822215360abdd60a619839397667f8e4e"}, + {file = 
"rapidfuzz-3.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:708457ab87c6eb2aec6f278697b03e088744623c63f450bae2571ce8f29e37d2"}, + {file = "rapidfuzz-3.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2dfba991ea3e9963a1d24946f7932893384a64369bf3c28c6d07eb8ee4c4fc86"}, + {file = "rapidfuzz-3.6.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:346422d616bdc9d90c2d800f963cde370c4bdc3b99ea1d9bd7b16d43f88d4313"}, + {file = "rapidfuzz-3.6.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:17b23ef8fae6aa08fe0a02b9e171ef6443ad876bebfdab8d491e07942e36bca6"}, + {file = "rapidfuzz-3.6.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:6be8fd7b7a39cb2655a0d216da8fc424bc334cfe43f1dcf00fbc3e0426252a35"}, + {file = "rapidfuzz-3.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ac480cdef530a5a9c8e560e5f77783f3dccd65373e0a09dabe32446e212ea3c4"}, + {file = "rapidfuzz-3.6.0-cp312-cp312-win32.whl", hash = "sha256:a6ae64784f0e8a7f989e0d24a401fce68fbe37b9e0d61c24ec983828b1dee768"}, + {file = "rapidfuzz-3.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0938400bb41f738809aae6cd9b4d9985ec2c8cfb34c4931c5a16dba76edf58c"}, + {file = "rapidfuzz-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:9a09731ed953126ac3f89f5a8d4998f93eca8a81e41e57b173edc3e44f0afd20"}, + {file = "rapidfuzz-3.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:606eb61b04f1f023048dae46e9a002354bde304a192d3b7f6fcc6e75d3910879"}, + {file = "rapidfuzz-3.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:40e381099e36df52e073abe18aa24e9ace17b7800c2b38d7818b9c77ba22a622"}, + {file = "rapidfuzz-3.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8ee4151e1ca81cdc8247847d8e041f2ed9e087d550bc21088506b1599c4c842a"}, + {file = "rapidfuzz-3.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a829e8486988889d6a316f528d92364a88c11a10fb53a8b981ae9cd52ab5846b"}, + {file = "rapidfuzz-3.6.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d58f3fd98495d3597137056eb88372ac9360b74a46ab298115230f259e1efa2"}, + {file = "rapidfuzz-3.6.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76e3d6507ce10696765709093cdedfc814e64960535dcd4f684564c02b6e6d07"}, + {file = "rapidfuzz-3.6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f70743f5626743dfab402697953ce0e9458d52000a2d5f52ae0e110facfd62bd"}, + {file = "rapidfuzz-3.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a40e52fcd1b61421de465113624bc802546b106fa02aa28b001b0db493651fd1"}, + {file = "rapidfuzz-3.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cb2ddc1afe32fc8d70dc37d825a04ab1df98a91f40ad6b17d976c5b6fbd99130"}, + {file = "rapidfuzz-3.6.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:67a97425abac462e0cb6be02a9adf581577d72e18aa922ef121400e203273657"}, + {file = "rapidfuzz-3.6.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:0c8d8e72030c7ad2b486a6cdcb0af7b51c0041db1e9e0e6c41fa3e3d244df284"}, + {file = "rapidfuzz-3.6.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5475ea451df17a802c3f78c58f63701c2e99762ce71b9953c2a8da574f450807"}, + {file = "rapidfuzz-3.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:19159a69f9d1132cffeb5fcea344f09ec02212654b92a913c9d3deaf3d387f46"}, + {file = "rapidfuzz-3.6.0-cp38-cp38-win32.whl", hash = "sha256:da1f2cff6f0128f1ff7c2745051a87f3cd8946036d4036a9dc464868ad5d3a53"}, + {file = "rapidfuzz-3.6.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:d920bead489ff2f02893261dd180f2c24afa4f0d00ec24b6b31311966e02f66f"}, + {file = "rapidfuzz-3.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:82db5d39f9d87b639a23d700200fea379916114d5352e9574d1a11f82b9d8bca"}, + {file = "rapidfuzz-3.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1f2ecf1e23ee64a7cdce01be1350b977169ac866d27f7301a3add21993163b68"}, + {file = "rapidfuzz-3.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f916630b388d465ab54bb205398abbb38b3f8eeed8f224accee534271ca52fba"}, + {file = "rapidfuzz-3.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ecaa0619248cc0faa098cc8fa96a65296a9b734f2e8cd509a2cf1358041ae5"}, + {file = "rapidfuzz-3.6.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5801f45fb585c21b6dbe08658a0d38e08ddca7b1ffb3825f39a59bb78998529"}, + {file = "rapidfuzz-3.6.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a14ebea5e14b3b8c91c02be8adf9397247397f1f2be7e9cb6962ded9cc4d2cba"}, + {file = "rapidfuzz-3.6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b89591f5eb1c3a7509a5294bfd65b3eaca7ee7e0583bdd84122e2fc2e37e6973"}, + {file = "rapidfuzz-3.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:029ae17bbe55671884702adc16225ca25ca447f86c1dba95b564fcd51eb82d44"}, + {file = "rapidfuzz-3.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:23b278a7d8f2cdc1a54cf9b72a48f76dc9f1b68373f2a3299f8b6cc5e796c160"}, + {file = "rapidfuzz-3.6.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8a5087f5c0b4e8584cd6044c279b4a6df15420a0074bf914e50bdebc9ac4db77"}, + {file = "rapidfuzz-3.6.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a1d8640e1cad1757378a810c363695822462f331c40b23f30c1bbbc477d77c68"}, + {file = "rapidfuzz-3.6.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:2a3c44bafd6d919ccf3f36d454c3e70cafc4c1aa8557970befbb9ae4930e32d7"}, + {file = "rapidfuzz-3.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:863c6ee97d156026bc1685fb7878c440833c221e935474e5b0ffb255b0888356"}, + {file = "rapidfuzz-3.6.0-cp39-cp39-win32.whl", hash = "sha256:834d9bd0fca92fb5cd92a7027df5d0492c3d3c2111f365dc8168f5a2f2582a36"}, + {file = "rapidfuzz-3.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:26c671bd3f2954df7e3d9635d5870177ba7d7433ec610bc3d4ba75648d89b9e9"}, + {file = "rapidfuzz-3.6.0-cp39-cp39-win_arm64.whl", hash = "sha256:f4378ad75d176c067dc9f79898a8b767305cfac97712f769859b118164852893"}, + {file = "rapidfuzz-3.6.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a5a76e488060fde32bfd2dc789821da95ca172a718752d0e65a2b168c7742612"}, + {file = "rapidfuzz-3.6.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:454ecaf9bd547b8a7ff8f461f5b25bd60ec15800ff2fab562e663732f53f0829"}, + {file = "rapidfuzz-3.6.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:392effa32f235b2b30c9f00ece8002e21255fbbffa0ce0d4a1cbcbb88e02d019"}, + {file = "rapidfuzz-3.6.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad2c70768f83a1f202c191f0e4a3ef3d376659728a4602b22dc62bd7f118973"}, + {file = "rapidfuzz-3.6.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3fce2489072ae2362abeabdc7745a5b9eb0ff4291c7a69be2318025e1184f016"}, + {file = "rapidfuzz-3.6.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c715df0f21116e830c666d35ea413d528501c50079079ecaa5904ec244661992"}, + {file = 
"rapidfuzz-3.6.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be2e2b96ea1fcefe0a2ed2560fd33a510dc8afe8e93062e111b40d1cb0e34b6a"}, + {file = "rapidfuzz-3.6.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:adbc4686e7252b97ef344fb9fb05080c8524ac2e77a20e835d166b8330024ac3"}, + {file = "rapidfuzz-3.6.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:306ca0358fc7d2e4660de3fce782bfea9c6bf443d60f9134ea8d4b8e8f1869e3"}, + {file = "rapidfuzz-3.6.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:db23d692ba27f58e5b97c724005e6a478168cb41af5d793bbc5478cb52842306"}, + {file = "rapidfuzz-3.6.0.tar.gz", hash = "sha256:4cdf564c3eeb2d95148bd7199e7869fa927f47cc3aea42f299aa836cfb2b6cfd"}, ] [package.extras] @@ -4523,99 +4318,104 @@ full = ["numpy"] [[package]] name = "regex" -version = "2023.10.3" +version = "2023.12.25" description = "Alternative regular expression module, to replace re." optional = false python-versions = ">=3.7" files = [ - {file = "regex-2023.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc"}, - {file = "regex-2023.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a"}, - {file = "regex-2023.10.3-cp310-cp310-win32.whl", hash = "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec"}, - {file = "regex-2023.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"}, - {file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"}, - {file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"}, - {file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"}, - {file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"}, - {file = "regex-2023.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a3ee019a9befe84fa3e917a2dd378807e423d013377a884c1970a3c2792d293"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76066d7ff61ba6bf3cb5efe2428fc82aac91802844c022d849a1f0f53820502d"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe50b61bab1b1ec260fa7cd91106fa9fece57e6beba05630afe27c71259c59b"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fd88f373cb71e6b59b7fa597e47e518282455c2734fd4306a05ca219a1991b0"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ab05a182c7937fb374f7e946f04fb23a0c0699c0450e9fb02ef567412d2fa3"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dac37cf08fcf2094159922edc7a2784cfcc5c70f8354469f79ed085f0328ebdf"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54ddd0bb8fb626aa1f9ba7b36629564544954fff9669b15da3610c22b9a0991"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3367007ad1951fde612bf65b0dffc8fd681a4ab98ac86957d16491400d661302"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:16f8740eb6dbacc7113e3097b0a36065a02e37b47c936b551805d40340fb9971"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4f2ca6df64cbdd27f27b34f35adb640b5d2d77264228554e68deda54456eb11"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:39807cbcbe406efca2a233884e169d056c35aa7e9f343d4e78665246a332f597"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7eece6fbd3eae4a92d7c748ae825cbc1ee41a89bb1c3db05b5578ed3cfcfd7cb"}, - {file = "regex-2023.10.3-cp37-cp37m-win32.whl", hash = "sha256:ce615c92d90df8373d9e13acddd154152645c0dc060871abf6bd43809673d20a"}, - {file = "regex-2023.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f649fa32fe734c4abdfd4edbb8381c74abf5f34bc0b3271ce687b23729299ed"}, - {file = "regex-2023.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b98b7681a9437262947f41c7fac567c7e1f6eddd94b0483596d320092004533"}, - {file = "regex-2023.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:91dc1d531f80c862441d7b66c4505cd6ea9d312f01fb2f4654f40c6fdf5cc37a"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82fcc1f1cc3ff1ab8a57ba619b149b907072e750815c5ba63e7aa2e1163384a4"}, - {file = 
"regex-2023.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7979b834ec7a33aafae34a90aad9f914c41fd6eaa8474e66953f3f6f7cbd4368"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef71561f82a89af6cfcbee47f0fabfdb6e63788a9258e913955d89fdd96902ab"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd829712de97753367153ed84f2de752b86cd1f7a88b55a3a775eb52eafe8a94"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00e871d83a45eee2f8688d7e6849609c2ca2a04a6d48fba3dff4deef35d14f07"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:706e7b739fdd17cb89e1fbf712d9dc21311fc2333f6d435eac2d4ee81985098c"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cc3f1c053b73f20c7ad88b0d1d23be7e7b3901229ce89f5000a8399746a6e039"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f85739e80d13644b981a88f529d79c5bdf646b460ba190bffcaf6d57b2a9863"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:741ba2f511cc9626b7561a440f87d658aabb3d6b744a86a3c025f866b4d19e7f"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e77c90ab5997e85901da85131fd36acd0ed2221368199b65f0d11bca44549711"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:979c24cbefaf2420c4e377ecd1f165ea08cc3d1fbb44bdc51bccbbf7c66a2cb4"}, - {file = "regex-2023.10.3-cp38-cp38-win32.whl", hash = "sha256:58837f9d221744d4c92d2cf7201c6acd19623b50c643b56992cbd2b745485d3d"}, - {file = "regex-2023.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:c55853684fe08d4897c37dfc5faeff70607a5f1806c8be148f1695be4a63414b"}, - {file = "regex-2023.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c54e23836650bdf2c18222c87f6f840d4943944146ca479858404fedeb9f9af"}, - {file = "regex-2023.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69c0771ca5653c7d4b65203cbfc5e66db9375f1078689459fe196fe08b7b4930"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ac965a998e1388e6ff2e9781f499ad1eaa41e962a40d11c7823c9952c77123e"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c0e8fae5b27caa34177bdfa5a960c46ff2f78ee2d45c6db15ae3f64ecadde14"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c56c3d47da04f921b73ff9415fbaa939f684d47293f071aa9cbb13c94afc17d"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef1e014eed78ab650bef9a6a9cbe50b052c0aebe553fb2881e0453717573f52"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d29338556a59423d9ff7b6eb0cb89ead2b0875e08fe522f3e068b955c3e7b59b"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9c6d0ced3c06d0f183b73d3c5920727268d2201aa0fe6d55c60d68c792ff3588"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:994645a46c6a740ee8ce8df7911d4aee458d9b1bc5639bc968226763d07f00fa"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:66e2fe786ef28da2b28e222c89502b2af984858091675044d93cb50e6f46d7af"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:11175910f62b2b8c055f2b089e0fedd694fe2be3941b3e2633653bc51064c528"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:06e9abc0e4c9ab4779c74ad99c3fc10d3967d03114449acc2c2762ad4472b8ca"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fb02e4257376ae25c6dd95a5aec377f9b18c09be6ebdefa7ad209b9137b73d48"}, - {file = "regex-2023.10.3-cp39-cp39-win32.whl", hash = "sha256:3b2c3502603fab52d7619b882c25a6850b766ebd1b18de3df23b2f939360e1bd"}, - {file = "regex-2023.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:adbccd17dcaff65704c856bd29951c58a1bd4b2b0f8ad6b826dbd543fe740988"}, - {file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = 
"regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = 
"regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, ] [[package]] @@ -4703,22 +4503,6 @@ pygments = ">=2.13.0,<3.0.0" [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] -[[package]] -name = "rouge-score" -version = "0.1.2" -description = "Pure python implementation of ROUGE-1.5.5." -optional = false -python-versions = ">=3.7" -files = [ - {file = "rouge_score-0.1.2.tar.gz", hash = "sha256:c7d4da2683e68c9abf0135ef915d63a46643666f848e558a1b9f7ead17ff0f04"}, -] - -[package.dependencies] -absl-py = "*" -nltk = "*" -numpy = "*" -six = ">=1.14.0" - [[package]] name = "rsa" version = "4.9" @@ -4735,13 +4519,13 @@ pyasn1 = ">=0.1.3" [[package]] name = "s3transfer" -version = "0.9.0" +version = "0.10.0" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">= 3.8" files = [ - {file = "s3transfer-0.9.0-py3-none-any.whl", hash = "sha256:01d4d2c35a016db8cb14f9a4d5e84c1f8c96e7ffc211422555eed45c11fa7eb1"}, - {file = "s3transfer-0.9.0.tar.gz", hash = "sha256:9e1b186ec8bb5907a1e82b51237091889a9973a2bb799a924bcd9f301ff79d3d"}, + {file = "s3transfer-0.10.0-py3-none-any.whl", hash = "sha256:3cdb40f5cfa6966e812209d0994f2a4709b561c88e90cf00c2696d2df4e56b2e"}, + {file = "s3transfer-0.10.0.tar.gz", hash = "sha256:d0c8bbf672d5eebbe4e57945e23b972d963f07d82f661cabf678a5c88831595b"}, ] [package.dependencies] @@ -5065,60 +4849,15 @@ files = [ {file = "sentencepiece-0.1.99.tar.gz", hash = "sha256:189c48f5cb2949288f97ccdb97f0473098d9c3dcf5a3d99d4eabe719ec27297f"}, ] -[[package]] -name = "sentry-sdk" -version = "1.39.1" -description = "Python client for Sentry (https://sentry.io)" -optional = false -python-versions = "*" -files = [ - {file = "sentry-sdk-1.39.1.tar.gz", hash = "sha256:320a55cdf9da9097a0bead239c35b7e61f53660ef9878861824fd6d9b2eaf3b5"}, - {file = "sentry_sdk-1.39.1-py2.py3-none-any.whl", hash = "sha256:81b5b9ffdd1a374e9eb0c053b5d2012155db9cbe76393a8585677b753bd5fdc1"}, -] - -[package.dependencies] -certifi = "*" -urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} - -[package.extras] -aiohttp = ["aiohttp 
(>=3.5)"] -arq = ["arq (>=0.23)"] -asyncpg = ["asyncpg (>=0.23)"] -beam = ["apache-beam (>=2.12)"] -bottle = ["bottle (>=0.12.13)"] -celery = ["celery (>=3)"] -chalice = ["chalice (>=1.16.0)"] -clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] -django = ["django (>=1.8)"] -falcon = ["falcon (>=1.4)"] -fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] -grpcio = ["grpcio (>=1.21.1)"] -httpx = ["httpx (>=0.16.0)"] -huey = ["huey (>=2)"] -loguru = ["loguru (>=0.5)"] -opentelemetry = ["opentelemetry-distro (>=0.35b0)"] -opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"] -pure-eval = ["asttokens", "executing", "pure-eval"] -pymongo = ["pymongo (>=3.1)"] -pyspark = ["pyspark (>=2.4.4)"] -quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] -rq = ["rq (>=0.6)"] -sanic = ["sanic (>=0.8)"] -sqlalchemy = ["sqlalchemy (>=1.2)"] -starlette = ["starlette (>=0.19.1)"] -starlite = ["starlite (>=1.48)"] -tornado = ["tornado (>=5)"] - [[package]] name = "setuptools" -version = "69.0.2" +version = "69.0.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, - {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, ] [package.extras] @@ -5871,17 +5610,6 @@ dev = ["tokenizers[testing]"] docs = ["setuptools_rust", "sphinx", "sphinx_rtd_theme"] testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - [[package]] name = "tomlkit" version = "0.12.3" @@ -5992,13 +5720,13 @@ telegram = ["requests"] [[package]] name = "transformers" -version = "4.36.1" +version = "4.36.2" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" optional = false python-versions = ">=3.8.0" files = [ - {file = "transformers-4.36.1-py3-none-any.whl", hash = "sha256:0e309d03634885f02d46801ec4f2c3fc1d614a5b9ebde608181f3e842bac53b8"}, - {file = "transformers-4.36.1.tar.gz", hash = "sha256:28e55952d9bed68f06cf45a3d29cc480679b528afe944e68f8cf6c799e428759"}, + {file = "transformers-4.36.2-py3-none-any.whl", hash = "sha256:462066c4f74ee52516f12890dcc9ec71d1a5e97998db621668455117a54330f6"}, + {file = "transformers-4.36.2.tar.gz", hash = "sha256:d8068e897e47793281501e547d2bbdfc5b8556409c2cb6c3d9e2ca77d4c0b4ec"}, ] [package.dependencies] @@ -6343,24 
+6071,23 @@ wasabi = ">=0.9.1,<1.2.0" [[package]] name = "weaviate-client" -version = "4.0b1" +version = "4.4b1" description = "A python native Weaviate client" optional = false python-versions = ">=3.8" files = [ - {file = "weaviate-client-4.0b1.tar.gz", hash = "sha256:db7ab8f50ab0a83f52d63bf0a9f9179bc4f47620393a2e2da528a0c6373f75b3"}, - {file = "weaviate_client-4.0b1-py3-none-any.whl", hash = "sha256:a453d26026427e186e49d9119017749e90c037c47526798693118632dfcf0554"}, + {file = "weaviate-client-4.4b1.tar.gz", hash = "sha256:f0b766dcf9230e798b8f97172a428510fe1e48bd9de32fd2167f8550a1ad5910"}, + {file = "weaviate_client-4.4b1-py3-none-any.whl", hash = "sha256:7bf8bf58b191db6605bd62aca9f9fa8bfe3447a4570aaaa53af34004efb65ce2"}, ] [package.dependencies] authlib = ">=1.2.1,<2.0.0" +grpcio = ">=1.57.0,<2.0.0" +grpcio-tools = ">=1.57.0,<2.0.0" pydantic = ">=2.1.1,<3.0.0" requests = ">=2.30.0,<3.0.0" validators = ">=0.21.2,<1.0.0" -[package.extras] -grpc = ["grpcio (>=1.57.0,<2.0.0)", "grpcio-tools (>=1.57.0,<2.0.0)"] - [[package]] name = "werkzeug" version = "3.0.1" @@ -6485,123 +6212,6 @@ files = [ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] -[[package]] -name = "xxhash" -version = "3.4.1" -description = "Python binding for xxHash" -optional = false -python-versions = ">=3.7" -files = [ - {file = "xxhash-3.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91dbfa55346ad3e18e738742236554531a621042e419b70ad8f3c1d9c7a16e7f"}, - {file = "xxhash-3.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:665a65c2a48a72068fcc4d21721510df5f51f1142541c890491afc80451636d2"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb11628470a6004dc71a09fe90c2f459ff03d611376c1debeec2d648f44cb693"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bef2a7dc7b4f4beb45a1edbba9b9194c60a43a89598a87f1a0226d183764189"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0f7b2d547d72c7eda7aa817acf8791f0146b12b9eba1d4432c531fb0352228"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00f2fdef6b41c9db3d2fc0e7f94cb3db86693e5c45d6de09625caad9a469635b"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23cfd9ca09acaf07a43e5a695143d9a21bf00f5b49b15c07d5388cadf1f9ce11"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6a9ff50a3cf88355ca4731682c168049af1ca222d1d2925ef7119c1a78e95b3b"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f1d7c69a1e9ca5faa75546fdd267f214f63f52f12692f9b3a2f6467c9e67d5e7"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:672b273040d5d5a6864a36287f3514efcd1d4b1b6a7480f294c4b1d1ee1b8de0"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4178f78d70e88f1c4a89ff1ffe9f43147185930bb962ee3979dba15f2b1cc799"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9804b9eb254d4b8cc83ab5a2002128f7d631dd427aa873c8727dba7f1f0d1c2b"}, - {file = "xxhash-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c09c49473212d9c87261d22c74370457cfff5db2ddfc7fd1e35c80c31a8c14ce"}, - {file = "xxhash-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ebbb1616435b4a194ce3466d7247df23499475c7ed4eb2681a1fa42ff766aff6"}, - {file = 
"xxhash-3.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:25dc66be3db54f8a2d136f695b00cfe88018e59ccff0f3b8f545869f376a8a46"}, - {file = "xxhash-3.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58c49083801885273e262c0f5bbeac23e520564b8357fbb18fb94ff09d3d3ea5"}, - {file = "xxhash-3.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b526015a973bfbe81e804a586b703f163861da36d186627e27524f5427b0d520"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ad4457644c91a966f6fe137d7467636bdc51a6ce10a1d04f365c70d6a16d7e"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:248d3e83d119770f96003271fe41e049dd4ae52da2feb8f832b7a20e791d2920"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2070b6d5bbef5ee031666cf21d4953c16e92c2f8a24a94b5c240f8995ba3b1d0"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2746035f518f0410915e247877f7df43ef3372bf36cfa52cc4bc33e85242641"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ba6181514681c2591840d5632fcf7356ab287d4aff1c8dea20f3c78097088"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aac5010869240e95f740de43cd6a05eae180c59edd182ad93bf12ee289484fa"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4cb11d8debab1626181633d184b2372aaa09825bde709bf927704ed72765bed1"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b29728cff2c12f3d9f1d940528ee83918d803c0567866e062683f300d1d2eff3"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a15cbf3a9c40672523bdb6ea97ff74b443406ba0ab9bca10ceccd9546414bd84"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e66df260fed01ed8ea790c2913271641c58481e807790d9fca8bfd5a3c13844"}, - {file = "xxhash-3.4.1-cp311-cp311-win32.whl", hash = "sha256:e867f68a8f381ea12858e6d67378c05359d3a53a888913b5f7d35fbf68939d5f"}, - {file = "xxhash-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:200a5a3ad9c7c0c02ed1484a1d838b63edcf92ff538770ea07456a3732c577f4"}, - {file = "xxhash-3.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:1d03f1c0d16d24ea032e99f61c552cb2b77d502e545187338bea461fde253583"}, - {file = "xxhash-3.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c4bbba9b182697a52bc0c9f8ec0ba1acb914b4937cd4a877ad78a3b3eeabefb3"}, - {file = "xxhash-3.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9fd28a9da300e64e434cfc96567a8387d9a96e824a9be1452a1e7248b7763b78"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6066d88c9329ab230e18998daec53d819daeee99d003955c8db6fc4971b45ca3"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93805bc3233ad89abf51772f2ed3355097a5dc74e6080de19706fc447da99cd3"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64da57d5ed586ebb2ecdde1e997fa37c27fe32fe61a656b77fabbc58e6fbff6e"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97322e9a7440bf3c9805cbaac090358b43f650516486746f7fa482672593df"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:bbe750d512982ee7d831838a5dee9e9848f3fb440e4734cca3f298228cc957a6"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fd79d4087727daf4d5b8afe594b37d611ab95dc8e29fe1a7517320794837eb7d"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:743612da4071ff9aa4d055f3f111ae5247342931dedb955268954ef7201a71ff"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b41edaf05734092f24f48c0958b3c6cbaaa5b7e024880692078c6b1f8247e2fc"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a90356ead70d715fe64c30cd0969072de1860e56b78adf7c69d954b43e29d9fa"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ac56eebb364e44c85e1d9e9cc5f6031d78a34f0092fea7fc80478139369a8b4a"}, - {file = "xxhash-3.4.1-cp312-cp312-win32.whl", hash = "sha256:911035345932a153c427107397c1518f8ce456f93c618dd1c5b54ebb22e73747"}, - {file = "xxhash-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:f31ce76489f8601cc7b8713201ce94b4bd7b7ce90ba3353dccce7e9e1fee71fa"}, - {file = "xxhash-3.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:b5beb1c6a72fdc7584102f42c4d9df232ee018ddf806e8c90906547dfb43b2da"}, - {file = "xxhash-3.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6d42b24d1496deb05dee5a24ed510b16de1d6c866c626c2beb11aebf3be278b9"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b685fab18876b14a8f94813fa2ca80cfb5ab6a85d31d5539b7cd749ce9e3624"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419ffe34c17ae2df019a4685e8d3934d46b2e0bbe46221ab40b7e04ed9f11137"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e041ce5714f95251a88670c114b748bca3bf80cc72400e9f23e6d0d59cf2681"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc860d887c5cb2f524899fb8338e1bb3d5789f75fac179101920d9afddef284b"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:312eba88ffe0a05e332e3a6f9788b73883752be63f8588a6dc1261a3eaaaf2b2"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e01226b6b6a1ffe4e6bd6d08cfcb3ca708b16f02eb06dd44f3c6e53285f03e4f"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9f3025a0d5d8cf406a9313cd0d5789c77433ba2004b1c75439b67678e5136537"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:6d3472fd4afef2a567d5f14411d94060099901cd8ce9788b22b8c6f13c606a93"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:43984c0a92f06cac434ad181f329a1445017c33807b7ae4f033878d860a4b0f2"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a55e0506fdb09640a82ec4f44171273eeabf6f371a4ec605633adb2837b5d9d5"}, - {file = "xxhash-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:faec30437919555b039a8bdbaba49c013043e8f76c999670aef146d33e05b3a0"}, - {file = "xxhash-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c9e1b646af61f1fc7083bb7b40536be944f1ac67ef5e360bca2d73430186971a"}, - {file = "xxhash-3.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:961d948b7b1c1b6c08484bbce3d489cdf153e4122c3dfb07c2039621243d8795"}, - {file = "xxhash-3.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:719a378930504ab159f7b8e20fa2aa1896cde050011af838af7e7e3518dd82de"}, - {file = 
"xxhash-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74fb5cb9406ccd7c4dd917f16630d2e5e8cbbb02fc2fca4e559b2a47a64f4940"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dab508ac39e0ab988039bc7f962c6ad021acd81fd29145962b068df4148c476"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c59f3e46e7daf4c589e8e853d700ef6607afa037bfad32c390175da28127e8c"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cc07256eff0795e0f642df74ad096f8c5d23fe66bc138b83970b50fc7f7f6c5"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9f749999ed80f3955a4af0eb18bb43993f04939350b07b8dd2f44edc98ffee9"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7688d7c02149a90a3d46d55b341ab7ad1b4a3f767be2357e211b4e893efbaaf6"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a8b4977963926f60b0d4f830941c864bed16aa151206c01ad5c531636da5708e"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8106d88da330f6535a58a8195aa463ef5281a9aa23b04af1848ff715c4398fb4"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4c76a77dbd169450b61c06fd2d5d436189fc8ab7c1571d39265d4822da16df22"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:11f11357c86d83e53719c592021fd524efa9cf024dc7cb1dfb57bbbd0d8713f2"}, - {file = "xxhash-3.4.1-cp38-cp38-win32.whl", hash = "sha256:0c786a6cd74e8765c6809892a0d45886e7c3dc54de4985b4a5eb8b630f3b8e3b"}, - {file = "xxhash-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:aabf37fb8fa27430d50507deeab2ee7b1bcce89910dd10657c38e71fee835594"}, - {file = "xxhash-3.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6127813abc1477f3a83529b6bbcfeddc23162cece76fa69aee8f6a8a97720562"}, - {file = "xxhash-3.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef2e194262f5db16075caea7b3f7f49392242c688412f386d3c7b07c7733a70a"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71be94265b6c6590f0018bbf73759d21a41c6bda20409782d8117e76cd0dfa8b"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10e0a619cdd1c0980e25eb04e30fe96cf8f4324758fa497080af9c21a6de573f"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa122124d2e3bd36581dd78c0efa5f429f5220313479fb1072858188bc2d5ff1"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17032f5a4fea0a074717fe33477cb5ee723a5f428de7563e75af64bfc1b1e10"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca7783b20e3e4f3f52f093538895863f21d18598f9a48211ad757680c3bd006f"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d77d09a1113899fad5f354a1eb4f0a9afcf58cefff51082c8ad643ff890e30cf"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:21287bcdd299fdc3328cc0fbbdeaa46838a1c05391264e51ddb38a3f5b09611f"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:dfd7a6cc483e20b4ad90224aeb589e64ec0f31e5610ab9957ff4314270b2bf31"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:543c7fcbc02bbb4840ea9915134e14dc3dc15cbd5a30873a7a5bf66039db97ec"}, 
- {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fe0a98d990e433013f41827b62be9ab43e3cf18e08b1483fcc343bda0d691182"}, - {file = "xxhash-3.4.1-cp39-cp39-win32.whl", hash = "sha256:b9097af00ebf429cc7c0e7d2fdf28384e4e2e91008130ccda8d5ae653db71e54"}, - {file = "xxhash-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:d699b921af0dcde50ab18be76c0d832f803034d80470703700cb7df0fbec2832"}, - {file = "xxhash-3.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:2be491723405e15cc099ade1280133ccfbf6322d2ef568494fb7d07d280e7eee"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:431625fad7ab5649368c4849d2b49a83dc711b1f20e1f7f04955aab86cd307bc"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc6dbd5fc3c9886a9e041848508b7fb65fd82f94cc793253990f81617b61fe49"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ff8dbd0ec97aec842476cb8ccc3e17dd288cd6ce3c8ef38bff83d6eb927817"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef73a53fe90558a4096e3256752268a8bdc0322f4692ed928b6cd7ce06ad4fe3"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:450401f42bbd274b519d3d8dcf3c57166913381a3d2664d6609004685039f9d3"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a162840cf4de8a7cd8720ff3b4417fbc10001eefdd2d21541a8226bb5556e3bb"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b736a2a2728ba45017cb67785e03125a79d246462dfa892d023b827007412c52"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0ae4c2e7698adef58710d6e7a32ff518b66b98854b1c68e70eee504ad061d8"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6322c4291c3ff174dcd104fae41500e75dad12be6f3085d119c2c8a80956c51"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dd59ed668801c3fae282f8f4edadf6dc7784db6d18139b584b6d9677ddde1b6b"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92693c487e39523a80474b0394645b393f0ae781d8db3474ccdcead0559ccf45"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4603a0f642a1e8d7f3ba5c4c25509aca6a9c1cc16f85091004a7028607ead663"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa45e8cbfbadb40a920fe9ca40c34b393e0b067082d94006f7f64e70c7490a6"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:595b252943b3552de491ff51e5bb79660f84f033977f88f6ca1605846637b7c6"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:562d8b8f783c6af969806aaacf95b6c7b776929ae26c0cd941d54644ea7ef51e"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:41ddeae47cf2828335d8d991f2d2b03b0bdc89289dc64349d712ff8ce59d0647"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c44d584afdf3c4dbb3277e32321d1a7b01d6071c1992524b6543025fb8f4206f"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fd7bddb3a5b86213cc3f2c61500c16945a1b80ecd572f3078ddbbe68f9dabdfb"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ecb6c987b62437c2f99c01e97caf8d25660bf541fe79a481d05732e5236719c"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:696b4e18b7023527d5c50ed0626ac0520edac45a50ec7cf3fc265cd08b1f4c03"}, - {file = "xxhash-3.4.1.tar.gz", hash = "sha256:0379d6cf1ff987cd421609a264ce025e74f346e3e145dd106c0cc2e3ec3f99a9"}, -] - [[package]] name = "yarl" version = "1.9.4" @@ -6723,4 +6333,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "1794d169cd2ebd8d53e87f72475704b2eb2aadf24878f4677bdd0df87e7db0e1" +content-hash = "fd1827c12c55037c200a399460ca9d17da4c6c1326eb275d18d9bb99cfbd60f3" diff --git a/level_4/pyproject.toml b/pyproject.toml similarity index 96% rename from level_4/pyproject.toml rename to pyproject.toml index 18f80ad3d..84eeca667 100644 --- a/level_4/pyproject.toml +++ b/pyproject.toml @@ -38,9 +38,8 @@ pypdf = "^3.12.0" fastjsonschema = "^2.18.0" marvin = "^1.3.0" dlt = { version ="^0.3.8", extras = ["duckdb"]} -weaviate-client = "4.*" +weaviate-client = "4.4b1" python-multipart = "^0.0.6" -deepeval = "^0.20.12" pymupdf = "^1.23.3" psycopg2 = "^2.9.8" llama-index = "^0.8.39.post2" @@ -59,6 +58,8 @@ graphviz = "^0.20.1" greenlet = "^3.0.1" neo4j = "^5.14.1" grpcio = "^1.60.0" +langdetect = "^1.0.9" +iso639 = "^0.1.4"