diff --git a/.devcontainer/post_start_command.sh b/.devcontainer/post_start_command.sh
index 633da9649cc9ed..e3d5a6d59d746e 100755
--- a/.devcontainer/post_start_command.sh
+++ b/.devcontainer/post_start_command.sh
@@ -1,3 +1,3 @@
 #!/bin/bash
 
-cd api && pip install -r requirements.txt
\ No newline at end of file
+poetry install -C api
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index b596bdb6b0d2a0..ab7118f04e7213 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -20,7 +20,7 @@ body:
   - type: input
     attributes:
       label: Dify version
-      placeholder: 0.3.21
+      placeholder: 0.6.11
       description: See about section in Dify console
     validations:
       required: true
@@ -40,7 +40,7 @@ body:
   - type: textarea
     attributes:
       label: Steps to reproduce
-      description: We highly suggest including screenshots and a bug report log.
+      description: We highly suggest including screenshots and a bug report log. Please use the right markdown syntax for code blocks.
       placeholder: Having detailed steps helps us reproduce the bug.
     validations:
       required: true
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
index 8730f5c11f80ee..3fa3b513c6f07b 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.yml
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -16,31 +16,19 @@ body:
       required: true
   - type: textarea
     attributes:
-      label: 1. Is this request related to a challenge you're experiencing?
+      label: 1. Is this request related to a challenge you're experiencing? Tell me about your story.
       placeholder: Please describe the specific scenario or problem you're facing as clearly as possible. For instance "I was trying to use [feature] for [specific task], and [what happened]... It was frustrating because...."
     validations:
       required: true
   - type: textarea
     attributes:
-      label: 2. Describe the feature you'd like to see
-      placeholder: Think about what you want to achieve and how this feature will help you. Sketches, flow diagrams, or any visual representation will be a major plus.
-    validations:
-      required: true
-  - type: textarea
-    attributes:
-      label: 3. How will this feature improve your workflow or experience?
-      placeholder: Tell us how this change will benefit your work. This helps us prioritize based on user impact.
-    validations:
-      required: true
-  - type: textarea
-    attributes:
-      label: 4. Additional context or comments
+      label: 2. Additional context or comments
       placeholder: (Any other information, comments, documentations, links, or screenshots that would provide more clarity. This is the place to add anything else not covered above.)
     validations:
       required: false
   - type: checkboxes
     attributes:
-      label: 5. Can you help us with this feature?
+      label: 3. Can you help us with this feature?
       description: Let us know! This is not a commitment, but a starting point for collaboration.
       options:
         - label: I am interested in contributing to this feature.
diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index 4af4daadeba3a8..4b75f886fdb292 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -14,74 +14,6 @@ concurrency: jobs: test: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: - - "3.10" - - "3.11" - - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: | - ./api/requirements.txt - ./api/requirements-dev.txt - - - name: Install dependencies - run: pip install -r ./api/requirements.txt -r ./api/requirements-dev.txt - - - name: Run Unit tests - run: dev/pytest/pytest_unit_tests.sh - - - name: Run ModelRuntime - run: dev/pytest/pytest_model_runtime.sh - - - name: Run Tool - run: dev/pytest/pytest_tools.sh - - - name: Set up Sandbox - uses: hoverkraft-tech/compose-action@v2.0.0 - with: - compose-file: | - docker/docker-compose.middleware.yaml - services: | - sandbox - ssrf_proxy - - - name: Run Workflow - run: dev/pytest/pytest_workflow.sh - - - name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma) - uses: hoverkraft-tech/compose-action@v2.0.0 - with: - compose-file: | - docker/docker-compose.middleware.yaml - docker/docker-compose.qdrant.yaml - docker/docker-compose.milvus.yaml - docker/docker-compose.pgvecto-rs.yaml - docker/docker-compose.pgvector.yaml - docker/docker-compose.chroma.yaml - services: | - weaviate - qdrant - etcd - minio - milvus-standalone - pgvecto-rs - pgvector - chroma - - - name: Test Vector Stores - run: dev/pytest/pytest_vdb.sh - - test-in-poetry: name: API Tests runs-on: ubuntu-latest strategy: @@ -108,7 +40,7 @@ jobs: - name: Poetry check run: | - poetry check -C api + poetry check -C api --lock poetry show -C api - name: Install dependencies @@ -145,6 +77,7 @@ jobs: docker/docker-compose.pgvecto-rs.yaml docker/docker-compose.pgvector.yaml docker/docker-compose.chroma.yaml + docker/docker-compose.oracle.yaml services: | weaviate qdrant @@ -154,6 +87,7 @@ jobs: pgvecto-rs pgvector chroma + oracle - name: Test Vector Stores run: poetry run -C api bash dev/pytest/pytest_vdb.sh diff --git a/.github/workflows/build-push.yml b/.github/workflows/build-push.yml index 89d301c4fd8a6e..2678f23a770acd 100644 --- a/.github/workflows/build-push.yml +++ b/.github/workflows/build-push.yml @@ -8,6 +8,10 @@ on: release: types: [published] +concurrency: + group: build-push-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + env: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} @@ -15,19 +19,35 @@ env: DIFY_API_IMAGE_NAME: ${{ vars.DIFY_API_IMAGE_NAME || 'langgenius/dify-api' }} jobs: - build-and-push: - runs-on: ubuntu-latest + build: + runs-on: ${{ matrix.platform == 'linux/arm64' && 'arm64_runner' || 'ubuntu-latest' }} if: github.repository == 'langgenius/dify' strategy: matrix: include: - - service_name: "web" - image_name_env: "DIFY_WEB_IMAGE_NAME" - context: "web" - - service_name: "api" + - service_name: "build-api-amd64" + image_name_env: "DIFY_API_IMAGE_NAME" + context: "api" + platform: linux/amd64 + - service_name: "build-api-arm64" image_name_env: "DIFY_API_IMAGE_NAME" context: "api" + platform: linux/arm64 + - service_name: "build-web-amd64" + image_name_env: "DIFY_WEB_IMAGE_NAME" + context: "web" + platform: linux/amd64 + - service_name: "build-web-arm64" + 
image_name_env: "DIFY_WEB_IMAGE_NAME" + context: "web" + platform: linux/arm64 + steps: + - name: Prepare + run: | + platform=${{ matrix.platform }} + echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV + - name: Set up QEMU uses: docker/setup-qemu-action@v3 @@ -40,7 +60,66 @@ jobs: username: ${{ env.DOCKERHUB_USER }} password: ${{ env.DOCKERHUB_TOKEN }} - - name: Extract metadata (tags, labels) for Docker + - name: Extract metadata for Docker + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env[matrix.image_name_env] }} + + - name: Build Docker image + id: build + uses: docker/build-push-action@v6 + with: + context: "{{defaultContext}}:${{ matrix.context }}" + platforms: ${{ matrix.platform }} + build-args: COMMIT_SHA=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }} + labels: ${{ steps.meta.outputs.labels }} + outputs: type=image,name=${{ env[matrix.image_name_env] }},push-by-digest=true,name-canonical=true,push=true + cache-from: type=gha,scope=${{ matrix.service_name }} + cache-to: type=gha,mode=max,scope=${{ matrix.service_name }} + + - name: Export digest + run: | + mkdir -p /tmp/digests + digest="${{ steps.build.outputs.digest }}" + touch "/tmp/digests/${digest#sha256:}" + + - name: Upload digest + uses: actions/upload-artifact@v4 + with: + name: digests-${{ matrix.context }}-${{ env.PLATFORM_PAIR }} + path: /tmp/digests/* + if-no-files-found: error + retention-days: 1 + + create-manifest: + needs: build + runs-on: ubuntu-latest + if: github.repository == 'langgenius/dify' + strategy: + matrix: + include: + - service_name: "merge-api-images" + image_name_env: "DIFY_API_IMAGE_NAME" + context: "api" + - service_name: "merge-web-images" + image_name_env: "DIFY_WEB_IMAGE_NAME" + context: "web" + steps: + - name: Download digests + uses: actions/download-artifact@v4 + with: + path: /tmp/digests + pattern: digests-${{ matrix.context }}-* + merge-multiple: true + + - name: Login to Docker Hub + uses: docker/login-action@v2 + with: + username: ${{ env.DOCKERHUB_USER }} + password: ${{ env.DOCKERHUB_TOKEN }} + + - name: Extract metadata for Docker id: meta uses: docker/metadata-action@v5 with: @@ -51,14 +130,12 @@ jobs: type=sha,enable=true,priority=100,prefix=,suffix=,format=long type=raw,value=${{ github.ref_name }},enable=${{ startsWith(github.ref, 'refs/tags/') }} - - name: Build and push - uses: docker/build-push-action@v5 - with: - context: "{{defaultContext}}:${{ matrix.context }}" - platforms: ${{ startsWith(github.ref, 'refs/tags/') && 'linux/amd64,linux/arm64' || 'linux/amd64' }} - build-args: COMMIT_SHA=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }} - push: true - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - cache-from: type=gha - cache-to: type=gha,mode=max + - name: Create manifest list and push + working-directory: /tmp/digests + run: | + docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) 
| join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ + $(printf '${{ env[matrix.image_name_env] }}@sha256:%s ' *) + + - name: Inspect image + run: | + docker buildx imagetools inspect ${{ env[matrix.image_name_env] }}:${{ steps.meta.outputs.version }} diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index cc4302117f80f9..f6092c86337d85 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -99,7 +99,7 @@ jobs: **.sh **.yaml **.yml - Dockerfile + **Dockerfile dev/** - name: Super-linter @@ -113,7 +113,8 @@ jobs: IGNORE_GITIGNORED_FILES: true VALIDATE_BASH: true VALIDATE_BASH_EXEC: true - VALIDATE_GITHUB_ACTIONS: true + # FIXME: temporarily disabled until api-docker.yaml's run script is fixed for shellcheck + # VALIDATE_GITHUB_ACTIONS: true VALIDATE_DOCKERFILE_HADOLINT: true VALIDATE_XML: true VALIDATE_YAML: true diff --git a/api/.dockerignore b/api/.dockerignore index 0ee003b912310a..91a5254ea7e151 100644 --- a/api/.dockerignore +++ b/api/.dockerignore @@ -8,4 +8,7 @@ logs *.log* # jetbrains -.idea \ No newline at end of file +.idea + +# venv +.venv \ No newline at end of file diff --git a/api/Dockerfile b/api/Dockerfile index 96b230e173b42a..15fd9d88e080f8 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,18 +1,28 @@ # base image -FROM python:3.10-slim-bookworm AS base +FROM python:3.10-slim-bookworm as base -LABEL maintainer="takatost@gmail.com" +WORKDIR /app/api + +# Install Poetry +ENV POETRY_VERSION=1.8.3 +RUN pip install --no-cache-dir --upgrade pip && \ + pip install --no-cache-dir --upgrade poetry==${POETRY_VERSION} + +# Configure Poetry +ENV POETRY_CACHE_DIR=/tmp/poetry_cache +ENV POETRY_NO_INTERACTION=1 +ENV POETRY_VIRTUALENVS_IN_PROJECT=true +ENV POETRY_VIRTUALENVS_CREATE=true -# install packages FROM base as packages RUN apt-get update \ && apt-get install -y --no-install-recommends gcc g++ libc-dev libffi-dev libgmp-dev libmpfr-dev libmpc-dev -COPY requirements.txt /requirements.txt +# Install Python dependencies +COPY pyproject.toml poetry.lock ./ +RUN poetry install --sync --no-cache --no-root -RUN --mount=type=cache,target=/root/.cache/pip \ - pip install --prefix=/pkg -r requirements.txt # production stage FROM base AS production @@ -37,13 +47,20 @@ RUN apt-get update \ && apt-get autoremove \ && rm -rf /var/lib/apt/lists/* -COPY --from=packages /pkg /usr/local +# Copy Python environment and packages +ENV VIRTUAL_ENV=/app/api/.venv +COPY --from=packages ${VIRTUAL_ENV} ${VIRTUAL_ENV} +ENV PATH="${VIRTUAL_ENV}/bin:${PATH}" + +# Copy source code COPY . /app/api/ +# Copy entrypoint COPY docker/entrypoint.sh /entrypoint.sh RUN chmod +x /entrypoint.sh + ARG COMMIT_SHA ENV COMMIT_SHA ${COMMIT_SHA} -ENTRYPOINT ["/bin/bash", "/entrypoint.sh"] \ No newline at end of file +ENTRYPOINT ["/bin/bash", "/entrypoint.sh"] diff --git a/api/README.md b/api/README.md index f49f2cf6d8332d..5f71dbe5f07f7b 100644 --- a/api/README.md +++ b/api/README.md @@ -2,6 +2,9 @@ ## Usage +> [!IMPORTANT] +> In the v0.6.12 release, we deprecated `pip` as the package management tool for Dify API Backend service and replaced it with `poetry`. + 1. Start the docker-compose stack The backend require some middleware, including PostgreSQL, Redis, and Weaviate, which can be started together using `docker-compose`. @@ -29,8 +32,6 @@ Dify API service uses [Poetry](https://python-poetry.org/docs/) to manage dependencies. You can execute `poetry shell` to activate the environment. - > Using pip can be found [below](#usage-with-pip). - 5. 
Install dependencies ```bash @@ -84,63 +85,3 @@ cd ../ poetry run -C api bash dev/pytest/pytest_all_tests.sh ``` - -## Usage with pip - -> [!NOTE] -> In the next version, we will deprecate pip as the primary package management tool for dify api service, currently Poetry and pip coexist. - -1. Start the docker-compose stack - - The backend require some middleware, including PostgreSQL, Redis, and Weaviate, which can be started together using `docker-compose`. - - ```bash - cd ../docker - docker-compose -f docker-compose.middleware.yaml -p dify up -d - cd ../api - ``` - -2. Copy `.env.example` to `.env` -3. Generate a `SECRET_KEY` in the `.env` file. - - ```bash - sed -i "/^SECRET_KEY=/c\SECRET_KEY=$(openssl rand -base64 42)" .env - ``` - -4. Create environment. - - If you use Anaconda, create a new environment and activate it - - ```bash - conda create --name dify python=3.10 - conda activate dify - ``` - -5. Install dependencies - - ```bash - pip install -r requirements.txt - ``` - -6. Run migrate - - Before the first launch, migrate the database to the latest version. - - ```bash - flask db upgrade - ``` - -7. Start backend: - - ```bash - flask run --host 0.0.0.0 --port=5001 --debug - ``` - -8. Setup your application by visiting or other apis... -9. If you need to debug local async processing, please start the worker service. - - ```bash - celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail - ``` - - The started celery app handles the async tasks, e.g. dataset importing and documents indexing. diff --git a/api/app.py b/api/app.py index 52461aac9304e9..40a90fdfa7f546 100644 --- a/api/app.py +++ b/api/app.py @@ -1,8 +1,8 @@ import os -from configs.app_configs import DifyConfigs +from configs.app_config import DifyConfig -if not os.environ.get("DEBUG") or os.environ.get("DEBUG").lower() != 'true': +if not os.environ.get("DEBUG") or os.environ.get("DEBUG", "false").lower() != 'true': from gevent import monkey monkey.patch_all() @@ -83,7 +83,7 @@ def create_flask_app_with_configs() -> Flask: """ dify_app = DifyApp(__name__) dify_app.config.from_object(Config()) - dify_app.config.from_mapping(DifyConfigs().model_dump()) + dify_app.config.from_mapping(DifyConfig().model_dump()) return dify_app @@ -152,27 +152,26 @@ def initialize_extensions(app): @login_manager.request_loader def load_user_from_request(request_from_flask_login): """Load user based on the request.""" - if request.blueprint in ['console', 'inner_api']: - # Check if the user_id contains a dot, indicating the old format - auth_header = request.headers.get('Authorization', '') - if not auth_header: - auth_token = request.args.get('_token') - if not auth_token: - raise Unauthorized('Invalid Authorization token.') - else: - if ' ' not in auth_header: - raise Unauthorized('Invalid Authorization header format. Expected \'Bearer \' format.') - auth_scheme, auth_token = auth_header.split(None, 1) - auth_scheme = auth_scheme.lower() - if auth_scheme != 'bearer': - raise Unauthorized('Invalid Authorization header format. 
Expected \'Bearer \' format.') - - decoded = PassportService().verify(auth_token) - user_id = decoded.get('user_id') - - return AccountService.load_user(user_id) - else: + if request.blueprint not in ['console', 'inner_api']: return None + # Check if the user_id contains a dot, indicating the old format + auth_header = request.headers.get('Authorization', '') + if not auth_header: + auth_token = request.args.get('_token') + if not auth_token: + raise Unauthorized('Invalid Authorization token.') + else: + if ' ' not in auth_header: + raise Unauthorized('Invalid Authorization header format. Expected \'Bearer \' format.') + auth_scheme, auth_token = auth_header.split(None, 1) + auth_scheme = auth_scheme.lower() + if auth_scheme != 'bearer': + raise Unauthorized('Invalid Authorization header format. Expected \'Bearer \' format.') + + decoded = PassportService().verify(auth_token) + user_id = decoded.get('user_id') + + return AccountService.load_logged_in_account(account_id=user_id, token=auth_token) @login_manager.unauthorized_handler diff --git a/api/config.py b/api/config.py index 4aa2de3aad49af..2e722c7009e6f1 100644 --- a/api/config.py +++ b/api/config.py @@ -3,26 +3,6 @@ import dotenv DEFAULTS = { - 'DB_USERNAME': 'postgres', - 'DB_PASSWORD': '', - 'DB_HOST': 'localhost', - 'DB_PORT': '5432', - 'DB_DATABASE': 'dify', - 'DB_CHARSET': '', - 'S3_USE_AWS_MANAGED_IAM': 'False', - 'S3_ADDRESS_STYLE': 'auto', - 'SQLALCHEMY_DATABASE_URI_SCHEME': 'postgresql', - 'SQLALCHEMY_POOL_SIZE': 30, - 'SQLALCHEMY_MAX_OVERFLOW': 10, - 'SQLALCHEMY_POOL_RECYCLE': 3600, - 'SQLALCHEMY_POOL_PRE_PING': 'False', - 'SQLALCHEMY_ECHO': 'False', - 'WEAVIATE_GRPC_ENABLED': 'True', - 'WEAVIATE_BATCH_SIZE': 100, - 'QDRANT_CLIENT_TIMEOUT': 20, - 'QDRANT_GRPC_ENABLED': 'False', - 'QDRANT_GRPC_PORT': '6334', - 'CELERY_BACKEND': 'database', 'HOSTED_OPENAI_QUOTA_LIMIT': 200, 'HOSTED_OPENAI_TRIAL_ENABLED': 'False', 'HOSTED_OPENAI_TRIAL_MODELS': 'gpt-3.5-turbo,gpt-3.5-turbo-1106,gpt-3.5-turbo-instruct,gpt-3.5-turbo-16k,gpt-3.5-turbo-16k-0613,gpt-3.5-turbo-0613,gpt-3.5-turbo-0125,text-davinci-003', @@ -37,7 +17,6 @@ 'HOSTED_MODERATION_PROVIDERS': '', 'HOSTED_FETCH_APP_TEMPLATES_MODE': 'remote', 'HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN': 'https://tmpl.dify.ai', - 'MILVUS_DATABASE': 'default', } @@ -75,150 +54,6 @@ def __init__(self): self.WEB_API_CORS_ALLOW_ORIGINS = get_cors_allow_origins( 'WEB_API_CORS_ALLOW_ORIGINS', '*') - # ------------------------ - # Database Configurations. - # ------------------------ - db_credentials = { - key: get_env(key) for key in - ['DB_USERNAME', 'DB_PASSWORD', 'DB_HOST', 'DB_PORT', 'DB_DATABASE', 'DB_CHARSET'] - } - self.SQLALCHEMY_DATABASE_URI_SCHEME = get_env('SQLALCHEMY_DATABASE_URI_SCHEME') - - db_extras = f"?client_encoding={db_credentials['DB_CHARSET']}" if db_credentials['DB_CHARSET'] else "" - - self.SQLALCHEMY_DATABASE_URI = f"{self.SQLALCHEMY_DATABASE_URI_SCHEME}://{db_credentials['DB_USERNAME']}:{db_credentials['DB_PASSWORD']}@{db_credentials['DB_HOST']}:{db_credentials['DB_PORT']}/{db_credentials['DB_DATABASE']}{db_extras}" - self.SQLALCHEMY_ENGINE_OPTIONS = { - 'pool_size': int(get_env('SQLALCHEMY_POOL_SIZE')), - 'max_overflow': int(get_env('SQLALCHEMY_MAX_OVERFLOW')), - 'pool_recycle': int(get_env('SQLALCHEMY_POOL_RECYCLE')), - 'pool_pre_ping': get_bool_env('SQLALCHEMY_POOL_PRE_PING'), - 'connect_args': {'options': '-c timezone=UTC'}, - } - - self.SQLALCHEMY_ECHO = get_bool_env('SQLALCHEMY_ECHO') - - # ------------------------ - # Celery worker Configurations. 
- # ------------------------ - self.CELERY_BROKER_URL = get_env('CELERY_BROKER_URL') - self.CELERY_BACKEND = get_env('CELERY_BACKEND') - self.CELERY_RESULT_BACKEND = 'db+{}'.format(self.SQLALCHEMY_DATABASE_URI) \ - if self.CELERY_BACKEND == 'database' else self.CELERY_BROKER_URL - self.BROKER_USE_SSL = self.CELERY_BROKER_URL.startswith('rediss://') if self.CELERY_BROKER_URL else False - - - # S3 Storage settings - self.S3_USE_AWS_MANAGED_IAM = get_bool_env('S3_USE_AWS_MANAGED_IAM') - self.S3_ENDPOINT = get_env('S3_ENDPOINT') - self.S3_BUCKET_NAME = get_env('S3_BUCKET_NAME') - self.S3_ACCESS_KEY = get_env('S3_ACCESS_KEY') - self.S3_SECRET_KEY = get_env('S3_SECRET_KEY') - self.S3_REGION = get_env('S3_REGION') - self.S3_ADDRESS_STYLE = get_env('S3_ADDRESS_STYLE') - - # Azure Blob Storage settings - self.AZURE_BLOB_ACCOUNT_NAME = get_env('AZURE_BLOB_ACCOUNT_NAME') - self.AZURE_BLOB_ACCOUNT_KEY = get_env('AZURE_BLOB_ACCOUNT_KEY') - self.AZURE_BLOB_CONTAINER_NAME = get_env('AZURE_BLOB_CONTAINER_NAME') - self.AZURE_BLOB_ACCOUNT_URL = get_env('AZURE_BLOB_ACCOUNT_URL') - - # Aliyun Storage settings - self.ALIYUN_OSS_BUCKET_NAME = get_env('ALIYUN_OSS_BUCKET_NAME') - self.ALIYUN_OSS_ACCESS_KEY = get_env('ALIYUN_OSS_ACCESS_KEY') - self.ALIYUN_OSS_SECRET_KEY = get_env('ALIYUN_OSS_SECRET_KEY') - self.ALIYUN_OSS_ENDPOINT = get_env('ALIYUN_OSS_ENDPOINT') - self.ALIYUN_OSS_REGION = get_env('ALIYUN_OSS_REGION') - self.ALIYUN_OSS_AUTH_VERSION = get_env('ALIYUN_OSS_AUTH_VERSION') - - # Google Cloud Storage settings - self.GOOGLE_STORAGE_BUCKET_NAME = get_env('GOOGLE_STORAGE_BUCKET_NAME') - self.GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64 = get_env('GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64') - - # Tencent Cos Storage settings - self.TENCENT_COS_BUCKET_NAME = get_env('TENCENT_COS_BUCKET_NAME') - self.TENCENT_COS_REGION = get_env('TENCENT_COS_REGION') - self.TENCENT_COS_SECRET_ID = get_env('TENCENT_COS_SECRET_ID') - self.TENCENT_COS_SECRET_KEY = get_env('TENCENT_COS_SECRET_KEY') - self.TENCENT_COS_SCHEME = get_env('TENCENT_COS_SCHEME') - - # ------------------------ - # Vector Store Configurations. 
- # Currently, only support: qdrant, milvus, zilliz, weaviate, relyt, pgvector - # ------------------------ - - # qdrant settings - self.QDRANT_URL = get_env('QDRANT_URL') - self.QDRANT_API_KEY = get_env('QDRANT_API_KEY') - self.QDRANT_CLIENT_TIMEOUT = get_env('QDRANT_CLIENT_TIMEOUT') - self.QDRANT_GRPC_ENABLED = get_env('QDRANT_GRPC_ENABLED') - self.QDRANT_GRPC_PORT = get_env('QDRANT_GRPC_PORT') - - # milvus / zilliz setting - self.MILVUS_HOST = get_env('MILVUS_HOST') - self.MILVUS_PORT = get_env('MILVUS_PORT') - self.MILVUS_USER = get_env('MILVUS_USER') - self.MILVUS_PASSWORD = get_env('MILVUS_PASSWORD') - self.MILVUS_SECURE = get_env('MILVUS_SECURE') - self.MILVUS_DATABASE = get_env('MILVUS_DATABASE') - - # OpenSearch settings - self.OPENSEARCH_HOST = get_env('OPENSEARCH_HOST') - self.OPENSEARCH_PORT = get_env('OPENSEARCH_PORT') - self.OPENSEARCH_USER = get_env('OPENSEARCH_USER') - self.OPENSEARCH_PASSWORD = get_env('OPENSEARCH_PASSWORD') - self.OPENSEARCH_SECURE = get_bool_env('OPENSEARCH_SECURE') - - # weaviate settings - self.WEAVIATE_ENDPOINT = get_env('WEAVIATE_ENDPOINT') - self.WEAVIATE_API_KEY = get_env('WEAVIATE_API_KEY') - self.WEAVIATE_GRPC_ENABLED = get_bool_env('WEAVIATE_GRPC_ENABLED') - self.WEAVIATE_BATCH_SIZE = int(get_env('WEAVIATE_BATCH_SIZE')) - - # relyt settings - self.RELYT_HOST = get_env('RELYT_HOST') - self.RELYT_PORT = get_env('RELYT_PORT') - self.RELYT_USER = get_env('RELYT_USER') - self.RELYT_PASSWORD = get_env('RELYT_PASSWORD') - self.RELYT_DATABASE = get_env('RELYT_DATABASE') - - # tencent settings - self.TENCENT_VECTOR_DB_URL = get_env('TENCENT_VECTOR_DB_URL') - self.TENCENT_VECTOR_DB_API_KEY = get_env('TENCENT_VECTOR_DB_API_KEY') - self.TENCENT_VECTOR_DB_TIMEOUT = get_env('TENCENT_VECTOR_DB_TIMEOUT') - self.TENCENT_VECTOR_DB_USERNAME = get_env('TENCENT_VECTOR_DB_USERNAME') - self.TENCENT_VECTOR_DB_DATABASE = get_env('TENCENT_VECTOR_DB_DATABASE') - self.TENCENT_VECTOR_DB_SHARD = get_env('TENCENT_VECTOR_DB_SHARD') - self.TENCENT_VECTOR_DB_REPLICAS = get_env('TENCENT_VECTOR_DB_REPLICAS') - - # pgvecto rs settings - self.PGVECTO_RS_HOST = get_env('PGVECTO_RS_HOST') - self.PGVECTO_RS_PORT = get_env('PGVECTO_RS_PORT') - self.PGVECTO_RS_USER = get_env('PGVECTO_RS_USER') - self.PGVECTO_RS_PASSWORD = get_env('PGVECTO_RS_PASSWORD') - self.PGVECTO_RS_DATABASE = get_env('PGVECTO_RS_DATABASE') - - # pgvector settings - self.PGVECTOR_HOST = get_env('PGVECTOR_HOST') - self.PGVECTOR_PORT = get_env('PGVECTOR_PORT') - self.PGVECTOR_USER = get_env('PGVECTOR_USER') - self.PGVECTOR_PASSWORD = get_env('PGVECTOR_PASSWORD') - self.PGVECTOR_DATABASE = get_env('PGVECTOR_DATABASE') - - # tidb-vector settings - self.TIDB_VECTOR_HOST = get_env('TIDB_VECTOR_HOST') - self.TIDB_VECTOR_PORT = get_env('TIDB_VECTOR_PORT') - self.TIDB_VECTOR_USER = get_env('TIDB_VECTOR_USER') - self.TIDB_VECTOR_PASSWORD = get_env('TIDB_VECTOR_PASSWORD') - self.TIDB_VECTOR_DATABASE = get_env('TIDB_VECTOR_DATABASE') - - # chroma settings - self.CHROMA_HOST = get_env('CHROMA_HOST') - self.CHROMA_PORT = get_env('CHROMA_PORT') - self.CHROMA_TENANT = get_env('CHROMA_TENANT') - self.CHROMA_DATABASE = get_env('CHROMA_DATABASE') - self.CHROMA_AUTH_PROVIDER = get_env('CHROMA_AUTH_PROVIDER') - self.CHROMA_AUTH_CREDENTIALS = get_env('CHROMA_AUTH_CREDENTIALS') - # ------------------------ # Platform Configurations. 
# ------------------------ diff --git a/api/configs/app_configs.py b/api/configs/app_config.py similarity index 63% rename from api/configs/app_configs.py rename to api/configs/app_config.py index 96352e43d0e2ab..4467b84c8666e6 100644 --- a/api/configs/app_configs.py +++ b/api/configs/app_config.py @@ -1,14 +1,14 @@ from pydantic_settings import BaseSettings, SettingsConfigDict -from configs.deploy import DeploymentConfigs -from configs.enterprise import EnterpriseFeatureConfigs -from configs.extra import ExtraServiceConfigs -from configs.feature import FeatureConfigs -from configs.middleware import MiddlewareConfigs +from configs.deploy import DeploymentConfig +from configs.enterprise import EnterpriseFeatureConfig +from configs.extra import ExtraServiceConfig +from configs.feature import FeatureConfig +from configs.middleware import MiddlewareConfig from configs.packaging import PackagingInfo -class DifyConfigs( +class DifyConfig( # based on pydantic-settings BaseSettings, @@ -16,20 +16,20 @@ class DifyConfigs( PackagingInfo, # Deployment configs - DeploymentConfigs, + DeploymentConfig, # Feature configs - FeatureConfigs, + FeatureConfig, # Middleware configs - MiddlewareConfigs, + MiddlewareConfig, # Extra service configs - ExtraServiceConfigs, + ExtraServiceConfig, # Enterprise feature configs # **Before using, please contact business@dify.ai by email to inquire about licensing matters.** - EnterpriseFeatureConfigs, + EnterpriseFeatureConfig, ): model_config = SettingsConfigDict( diff --git a/api/configs/deploy/__init__.py b/api/configs/deploy/__init__.py index 9a1c1b84d7cfb7..f7b118201fe24f 100644 --- a/api/configs/deploy/__init__.py +++ b/api/configs/deploy/__init__.py @@ -1,7 +1,7 @@ from pydantic import BaseModel, Field -class DeploymentConfigs(BaseModel): +class DeploymentConfig(BaseModel): """ Deployment configs """ diff --git a/api/configs/enterprise/__init__.py b/api/configs/enterprise/__init__.py index 37164ba2e3177f..39983036eb794f 100644 --- a/api/configs/enterprise/__init__.py +++ b/api/configs/enterprise/__init__.py @@ -1,7 +1,7 @@ from pydantic import BaseModel, Field -class EnterpriseFeatureConfigs(BaseModel): +class EnterpriseFeatureConfig(BaseModel): """ Enterprise feature configs. 
**Before using, please contact business@dify.ai by email to inquire about licensing matters.** diff --git a/api/configs/extra/__init__.py b/api/configs/extra/__init__.py index f248fcb47a4961..358c12d63a3f86 100644 --- a/api/configs/extra/__init__.py +++ b/api/configs/extra/__init__.py @@ -1,12 +1,12 @@ from pydantic import BaseModel -from configs.extra.notion_configs import NotionConfigs -from configs.extra.sentry_configs import SentryConfigs +from configs.extra.notion_config import NotionConfig +from configs.extra.sentry_config import SentryConfig -class ExtraServiceConfigs( +class ExtraServiceConfig( # place the configs in alphabet order - NotionConfigs, - SentryConfigs, + NotionConfig, + SentryConfig, ): pass diff --git a/api/configs/extra/notion_configs.py b/api/configs/extra/notion_config.py similarity index 96% rename from api/configs/extra/notion_configs.py rename to api/configs/extra/notion_config.py index da96b3c8c57296..f8df28cefdeb1c 100644 --- a/api/configs/extra/notion_configs.py +++ b/api/configs/extra/notion_config.py @@ -3,7 +3,7 @@ from pydantic import BaseModel, Field -class NotionConfigs(BaseModel): +class NotionConfig(BaseModel): """ Notion integration configs """ diff --git a/api/configs/extra/sentry_configs.py b/api/configs/extra/sentry_config.py similarity index 61% rename from api/configs/extra/sentry_configs.py rename to api/configs/extra/sentry_config.py index 7f19b347d4afaf..8cdb8cf45a35fa 100644 --- a/api/configs/extra/sentry_configs.py +++ b/api/configs/extra/sentry_config.py @@ -1,9 +1,9 @@ from typing import Optional -from pydantic import BaseModel, Field, PositiveFloat +from pydantic import BaseModel, Field, NonNegativeFloat -class SentryConfigs(BaseModel): +class SentryConfig(BaseModel): """ Sentry configs """ @@ -12,12 +12,12 @@ class SentryConfigs(BaseModel): default=None, ) - SENTRY_TRACES_SAMPLE_RATE: PositiveFloat = Field( + SENTRY_TRACES_SAMPLE_RATE: NonNegativeFloat = Field( description='Sentry trace sample rate', default=1.0, ) - SENTRY_PROFILES_SAMPLE_RATE: PositiveFloat = Field( + SENTRY_PROFILES_SAMPLE_RATE: NonNegativeFloat = Field( description='Sentry profiles sample rate', default=1.0, ) diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index e820761b06d8c4..4d7d0dcd1eb350 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -3,7 +3,7 @@ from pydantic import AliasChoices, BaseModel, Field, NonNegativeInt, PositiveInt -class SecurityConfigs(BaseModel): +class SecurityConfig(BaseModel): """ Secret Key configs """ @@ -16,7 +16,7 @@ class SecurityConfigs(BaseModel): ) -class AppExecutionConfigs(BaseModel): +class AppExecutionConfig(BaseModel): """ App Execution configs """ @@ -26,7 +26,7 @@ class AppExecutionConfigs(BaseModel): ) -class CodeExecutionSandboxConfigs(BaseModel): +class CodeExecutionSandboxConfig(BaseModel): """ Code Execution Sandbox configs """ @@ -41,7 +41,7 @@ class CodeExecutionSandboxConfigs(BaseModel): ) -class EndpointConfigs(BaseModel): +class EndpointConfig(BaseModel): """ Module URL configs """ @@ -70,7 +70,7 @@ class EndpointConfigs(BaseModel): ) -class FileAccessConfigs(BaseModel): +class FileAccessConfig(BaseModel): """ File Access configs """ @@ -89,7 +89,7 @@ class FileAccessConfigs(BaseModel): ) -class FileUploadConfigs(BaseModel): +class FileUploadConfig(BaseModel): """ File Uploading configs """ @@ -114,7 +114,7 @@ class FileUploadConfigs(BaseModel): ) -class HttpConfigs(BaseModel): +class HttpConfig(BaseModel): """ HTTP configs """ @@ -124,7 +124,7 @@ 
class HttpConfigs(BaseModel): ) -class InnerAPIConfigs(BaseModel): +class InnerAPIConfig(BaseModel): """ Inner API configs """ @@ -139,7 +139,7 @@ class InnerAPIConfigs(BaseModel): ) -class LoggingConfigs(BaseModel): +class LoggingConfig(BaseModel): """ Logging configs """ @@ -171,7 +171,7 @@ class LoggingConfigs(BaseModel): ) -class ModelLoadBalanceConfigs(BaseModel): +class ModelLoadBalanceConfig(BaseModel): """ Model load balance configs """ @@ -181,7 +181,7 @@ class ModelLoadBalanceConfigs(BaseModel): ) -class BillingConfigs(BaseModel): +class BillingConfig(BaseModel): """ Platform Billing Configurations """ @@ -191,7 +191,7 @@ class BillingConfigs(BaseModel): ) -class UpdateConfigs(BaseModel): +class UpdateConfig(BaseModel): """ Update configs """ @@ -201,7 +201,7 @@ class UpdateConfigs(BaseModel): ) -class WorkflowConfigs(BaseModel): +class WorkflowConfig(BaseModel): """ Workflow feature configs """ @@ -222,7 +222,7 @@ class WorkflowConfigs(BaseModel): ) -class OAuthConfigs(BaseModel): +class OAuthConfig(BaseModel): """ oauth configs """ @@ -252,7 +252,7 @@ class OAuthConfigs(BaseModel): ) -class ModerationConfigs(BaseModel): +class ModerationConfig(BaseModel): """ Moderation in app configs. """ @@ -264,7 +264,7 @@ class ModerationConfigs(BaseModel): ) -class ToolConfigs(BaseModel): +class ToolConfig(BaseModel): """ Tool configs """ @@ -275,7 +275,7 @@ class ToolConfigs(BaseModel): ) -class MailConfigs(BaseModel): +class MailConfig(BaseModel): """ Mail Configurations """ @@ -331,7 +331,7 @@ class MailConfigs(BaseModel): ) -class RagEtlConfigs(BaseModel): +class RagEtlConfig(BaseModel): """ RAG ETL Configurations. """ @@ -357,7 +357,7 @@ class RagEtlConfigs(BaseModel): ) -class DataSetConfigs(BaseModel): +class DataSetConfig(BaseModel): """ Dataset configs """ @@ -368,7 +368,7 @@ class DataSetConfigs(BaseModel): ) -class WorkspaceConfigs(BaseModel): +class WorkspaceConfig(BaseModel): """ Workspace configs """ @@ -379,7 +379,7 @@ class WorkspaceConfigs(BaseModel): ) -class IndexingConfigs(BaseModel): +class IndexingConfig(BaseModel): """ Indexing configs. 
""" @@ -390,36 +390,36 @@ class IndexingConfigs(BaseModel): ) -class ImageFormatConfigs(BaseModel): +class ImageFormatConfig(BaseModel): MULTIMODAL_SEND_IMAGE_FORMAT: str = Field( description='multi model send image format, support base64, url, default is base64', default='base64', ) -class FeatureConfigs( +class FeatureConfig( # place the configs in alphabet order - AppExecutionConfigs, - BillingConfigs, - CodeExecutionSandboxConfigs, - DataSetConfigs, - EndpointConfigs, - FileAccessConfigs, - FileUploadConfigs, - HttpConfigs, - ImageFormatConfigs, - InnerAPIConfigs, - IndexingConfigs, - LoggingConfigs, - MailConfigs, - ModelLoadBalanceConfigs, - ModerationConfigs, - OAuthConfigs, - RagEtlConfigs, - SecurityConfigs, - ToolConfigs, - UpdateConfigs, - WorkflowConfigs, - WorkspaceConfigs, + AppExecutionConfig, + BillingConfig, + CodeExecutionSandboxConfig, + DataSetConfig, + EndpointConfig, + FileAccessConfig, + FileUploadConfig, + HttpConfig, + ImageFormatConfig, + InnerAPIConfig, + IndexingConfig, + LoggingConfig, + MailConfig, + ModelLoadBalanceConfig, + ModerationConfig, + OAuthConfig, + RagEtlConfig, + SecurityConfig, + ToolConfig, + UpdateConfig, + WorkflowConfig, + WorkspaceConfig, ): pass diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py index 23fc620b78dbdb..5ea557c87dbfb7 100644 --- a/api/configs/middleware/__init__.py +++ b/api/configs/middleware/__init__.py @@ -1,8 +1,24 @@ -from typing import Optional +from typing import Any, Optional -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, NonNegativeInt, PositiveInt, computed_field -from configs.middleware.redis_configs import RedisConfigs +from configs.middleware.redis_config import RedisConfig +from configs.middleware.storage.aliyun_oss_storage_config import AliyunOSSStorageConfig +from configs.middleware.storage.amazon_s3_storage_config import S3StorageConfig +from configs.middleware.storage.azure_blob_storage_config import AzureBlobStorageConfig +from configs.middleware.storage.google_cloud_storage_config import GoogleCloudStorageConfig +from configs.middleware.storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig +from configs.middleware.vdb.chroma_configs import ChromaConfigs +from configs.middleware.vdb.milvus_configs import MilvusConfigs +from configs.middleware.vdb.opensearch_configs import OpenSearchConfigs +from configs.middleware.vdb.oracle_configs import OracleConfigs +from configs.middleware.vdb.pgvector_configs import PGVectorConfigs +from configs.middleware.vdb.pgvectors_configs import PGVectoRSConfigs +from configs.middleware.vdb.qdrant_configs import QdrantConfigs +from configs.middleware.vdb.relyt_configs import RelytConfigs +from configs.middleware.vdb.tencent_vector_configs import TencentVectorDBConfigs +from configs.middleware.vdb.tidb_vector_configs import TiDBVectorConfigs +from configs.middleware.vdb.weaviate_configs import WeaviateConfigs class StorageConfigs(BaseModel): @@ -33,11 +49,137 @@ class KeywordStoreConfigs(BaseModel): ) -class MiddlewareConfigs( +class DatabaseConfigs: + DB_HOST: str = Field( + description='db host', + default='localhost', + ) + + DB_PORT: PositiveInt = Field( + description='db port', + default=5432, + ) + + DB_USERNAME: str = Field( + description='db username', + default='postgres', + ) + + DB_PASSWORD: str = Field( + description='db password', + default='', + ) + + DB_DATABASE: str = Field( + description='db database', + default='dify', + ) + + DB_CHARSET: str = Field( + description='db charset', + 
default='', + ) + + SQLALCHEMY_DATABASE_URI_SCHEME: str = Field( + description='db uri scheme', + default='postgresql', + ) + + @computed_field + @property + def SQLALCHEMY_DATABASE_URI(self) -> str: + db_extras = f"?client_encoding={self.DB_CHARSET}" if self.DB_CHARSET else "" + return (f"{self.SQLALCHEMY_DATABASE_URI_SCHEME}://" + f"{self.DB_USERNAME}:{self.DB_PASSWORD}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_DATABASE}" + f"{db_extras}") + + SQLALCHEMY_POOL_SIZE: NonNegativeInt = Field( + description='pool size of SqlAlchemy', + default=30, + ) + + SQLALCHEMY_MAX_OVERFLOW: NonNegativeInt = Field( + description='max overflows for SqlAlchemy', + default=10, + ) + + SQLALCHEMY_POOL_RECYCLE: NonNegativeInt = Field( + description='SqlAlchemy pool recycle', + default=3600, + ) + + SQLALCHEMY_POOL_PRE_PING: bool = Field( + description='whether to enable pool pre-ping in SqlAlchemy', + default=False, + ) + + SQLALCHEMY_ECHO: bool = Field( + description='whether to enable SqlAlchemy echo', + default=False, + ) + + @computed_field + @property + def SQLALCHEMY_ENGINE_OPTIONS(self) -> dict[str, Any]: + return { + 'pool_size': self.SQLALCHEMY_POOL_SIZE, + 'max_overflow': self.SQLALCHEMY_MAX_OVERFLOW, + 'pool_recycle': self.SQLALCHEMY_POOL_RECYCLE, + 'pool_pre_ping': self.SQLALCHEMY_POOL_PRE_PING, + 'connect_args': {'options': '-c timezone=UTC'}, + } + + +class CeleryConfigs(DatabaseConfigs): + CELERY_BACKEND: str = Field( + description='Celery backend, available values are `database`, `redis`', + default='database', + ) + + CELERY_BROKER_URL: Optional[str] = Field( + description='CELERY_BROKER_URL', + default=None, + ) + + @computed_field + @property + def CELERY_RESULT_BACKEND(self) -> str: + return 'db+{}'.format(self.SQLALCHEMY_DATABASE_URI) \ + if self.CELERY_BACKEND == 'database' else self.CELERY_BROKER_URL + + @computed_field + @property + def BROKER_USE_SSL(self) -> bool: + return self.CELERY_BROKER_URL.startswith('rediss://') if self.CELERY_BROKER_URL else False + + +class MiddlewareConfig( # place the configs in alphabet order + CeleryConfigs, + DatabaseConfigs, KeywordStoreConfigs, - RedisConfigs, + RedisConfig, + + # configs of storage and storage providers StorageConfigs, + AliyunOSSStorageConfig, + AzureBlobStorageConfig, + GoogleCloudStorageConfig, + TencentCloudCOSStorageConfig, + S3StorageConfig, + + # configs of vdb and vdb providers VectorStoreConfigs, + ChromaConfigs, + MilvusConfigs, + OpenSearchConfigs, + OracleConfigs, + PGVectorConfigs, + PGVectoRSConfigs, + QdrantConfigs, + RelytConfigs, + TencentVectorDBConfigs, + TiDBVectorConfigs, + WeaviateConfigs, ): pass diff --git a/api/configs/middleware/redis_configs.py b/api/configs/middleware/redis_config.py similarity index 96% rename from api/configs/middleware/redis_configs.py rename to api/configs/middleware/redis_config.py index 9f98e7cabfefa7..4cc40bbe6d46c6 100644 --- a/api/configs/middleware/redis_configs.py +++ b/api/configs/middleware/redis_config.py @@ -3,7 +3,7 @@ from pydantic import BaseModel, Field, NonNegativeInt, PositiveInt -class RedisConfigs(BaseModel): +class RedisConfig(BaseModel): """ Redis configs """ diff --git a/api/configs/middleware/storage/aliyun_oss_storage_config.py b/api/configs/middleware/storage/aliyun_oss_storage_config.py new file mode 100644 index 00000000000000..67921149d6934b --- /dev/null +++ b/api/configs/middleware/storage/aliyun_oss_storage_config.py @@ -0,0 +1,39 @@ +from typing import Optional + +from pydantic import BaseModel, Field + + +class AliyunOSSStorageConfig(BaseModel): + """ + 
Aliyun storage configs + """ + + ALIYUN_OSS_BUCKET_NAME: Optional[str] = Field( + description='Aliyun storage ', + default=None, + ) + + ALIYUN_OSS_ACCESS_KEY: Optional[str] = Field( + description='Aliyun storage access key', + default=None, + ) + + ALIYUN_OSS_SECRET_KEY: Optional[str] = Field( + description='Aliyun storage secret key', + default=None, + ) + + ALIYUN_OSS_ENDPOINT: Optional[str] = Field( + description='Aliyun storage endpoint URL', + default=None, + ) + + ALIYUN_OSS_REGION: Optional[str] = Field( + description='Aliyun storage region', + default=None, + ) + + ALIYUN_OSS_AUTH_VERSION: Optional[str] = Field( + description='Aliyun storage authentication version', + default=None, + ) diff --git a/api/configs/middleware/storage/amazon_s3_storage_config.py b/api/configs/middleware/storage/amazon_s3_storage_config.py new file mode 100644 index 00000000000000..21fe425fa8d7fc --- /dev/null +++ b/api/configs/middleware/storage/amazon_s3_storage_config.py @@ -0,0 +1,44 @@ +from typing import Optional + +from pydantic import BaseModel, Field + + +class S3StorageConfig(BaseModel): + """ + S3 storage configs + """ + + S3_ENDPOINT: Optional[str] = Field( + description='S3 storage endpoint', + default=None, + ) + + S3_REGION: Optional[str] = Field( + description='S3 storage region', + default=None, + ) + + S3_BUCKET_NAME: Optional[str] = Field( + description='S3 storage bucket name', + default=None, + ) + + S3_ACCESS_KEY: Optional[str] = Field( + description='S3 storage access key', + default=None, + ) + + S3_SECRET_KEY: Optional[str] = Field( + description='S3 storage secret key', + default=None, + ) + + S3_ADDRESS_STYLE: str = Field( + description='S3 storage address style', + default='auto', + ) + + S3_USE_AWS_MANAGED_IAM: bool = Field( + description='whether to use aws managed IAM for S3', + default=False, + ) diff --git a/api/configs/middleware/storage/azure_blob_storage_config.py b/api/configs/middleware/storage/azure_blob_storage_config.py new file mode 100644 index 00000000000000..a37aa496f16f07 --- /dev/null +++ b/api/configs/middleware/storage/azure_blob_storage_config.py @@ -0,0 +1,29 @@ +from typing import Optional + +from pydantic import BaseModel, Field + + +class AzureBlobStorageConfig(BaseModel): + """ + Azure Blob storage configs + """ + + AZURE_BLOB_ACCOUNT_NAME: Optional[str] = Field( + description='Azure Blob account name', + default=None, + ) + + AZURE_BLOB_ACCOUNT_KEY: Optional[str] = Field( + description='Azure Blob account key', + default=None, + ) + + AZURE_BLOB_CONTAINER_NAME: Optional[str] = Field( + description='Azure Blob container name', + default=None, + ) + + AZURE_BLOB_ACCOUNT_URL: Optional[str] = Field( + description='Azure Blob account url', + default=None, + ) diff --git a/api/configs/middleware/storage/google_cloud_storage_config.py b/api/configs/middleware/storage/google_cloud_storage_config.py new file mode 100644 index 00000000000000..1f4d9f9883bb44 --- /dev/null +++ b/api/configs/middleware/storage/google_cloud_storage_config.py @@ -0,0 +1,19 @@ +from typing import Optional + +from pydantic import BaseModel, Field + + +class GoogleCloudStorageConfig(BaseModel): + """ + Google Cloud storage configs + """ + + GOOGLE_STORAGE_BUCKET_NAME: Optional[str] = Field( + description='Google Cloud storage bucket name', + default=None, + ) + + GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: Optional[str] = Field( + description='Google Cloud storage service account json base64', + default=None, + ) diff --git 
a/api/configs/middleware/storage/tencent_cos_storage_config.py b/api/configs/middleware/storage/tencent_cos_storage_config.py new file mode 100644 index 00000000000000..1bcc4b7b442bff --- /dev/null +++ b/api/configs/middleware/storage/tencent_cos_storage_config.py @@ -0,0 +1,34 @@ +from typing import Optional + +from pydantic import BaseModel, Field + + +class TencentCloudCOSStorageConfig(BaseModel): + """ + Tencent Cloud COS storage configs + """ + + TENCENT_COS_BUCKET_NAME: Optional[str] = Field( + description='Tencent Cloud COS bucket name', + default=None, + ) + + TENCENT_COS_REGION: Optional[str] = Field( + description='Tencent Cloud COS region', + default=None, + ) + + TENCENT_COS_SECRET_ID: Optional[str] = Field( + description='Tencent Cloud COS secret id', + default=None, + ) + + TENCENT_COS_SECRET_KEY: Optional[str] = Field( + description='Tencent Cloud COS secret key', + default=None, + ) + + TENCENT_COS_SCHEME: Optional[str] = Field( + description='Tencent Cloud COS scheme', + default=None, + ) diff --git a/api/configs/middleware/vdb/chroma_configs.py b/api/configs/middleware/vdb/chroma_configs.py new file mode 100644 index 00000000000000..ddb61a564d0bf2 --- /dev/null +++ b/api/configs/middleware/vdb/chroma_configs.py @@ -0,0 +1,39 @@ +from typing import Optional + +from pydantic import BaseModel, Field, PositiveInt + + +class ChromaConfigs(BaseModel): + """ + Chroma configs + """ + + CHROMA_HOST: Optional[str] = Field( + description='Chroma host', + default=None, + ) + + CHROMA_PORT: PositiveInt = Field( + description='Chroma port', + default=8000, + ) + + CHROMA_TENANT: Optional[str] = Field( + description='Chroma database', + default=None, + ) + + CHROMA_DATABASE: Optional[str] = Field( + description='Chroma database', + default=None, + ) + + CHROMA_AUTH_PROVIDER: Optional[str] = Field( + description='Chroma authentication provider', + default=None, + ) + + CHROMA_AUTH_CREDENTIALS: Optional[str] = Field( + description='Chroma authentication credentials', + default=None, + ) diff --git a/api/configs/middleware/vdb/milvus_configs.py b/api/configs/middleware/vdb/milvus_configs.py new file mode 100644 index 00000000000000..f291a0e0661de5 --- /dev/null +++ b/api/configs/middleware/vdb/milvus_configs.py @@ -0,0 +1,39 @@ +from typing import Optional + +from pydantic import BaseModel, Field, PositiveInt + + +class MilvusConfigs(BaseModel): + """ + Milvus configs + """ + + MILVUS_HOST: Optional[str] = Field( + description='Milvus host', + default=None, + ) + + MILVUS_PORT: PositiveInt = Field( + description='Milvus RestFul API port', + default=9091, + ) + + MILVUS_USER: Optional[str] = Field( + description='Milvus user', + default=None, + ) + + MILVUS_PASSWORD: Optional[str] = Field( + description='Milvus password', + default=None, + ) + + MILVUS_SECURE: bool = Field( + description='wheter to use SSL connection for Milvus', + default=False, + ) + + MILVUS_DATABASE: str = Field( + description='Milvus database', + default='default', + ) diff --git a/api/configs/middleware/vdb/opensearch_configs.py b/api/configs/middleware/vdb/opensearch_configs.py new file mode 100644 index 00000000000000..4e29c9c9d51ac0 --- /dev/null +++ b/api/configs/middleware/vdb/opensearch_configs.py @@ -0,0 +1,34 @@ +from typing import Optional + +from pydantic import BaseModel, Field, PositiveInt + + +class OpenSearchConfigs(BaseModel): + """ + OpenSearch configs + """ + + OPENSEARCH_HOST: Optional[str] = Field( + description='OpenSearch host', + default=None, + ) + + OPENSEARCH_PORT: PositiveInt = Field( + 
description='OpenSearch port', + default=9200, + ) + + OPENSEARCH_USER: Optional[str] = Field( + description='OpenSearch user', + default=None, + ) + + OPENSEARCH_PASSWORD: Optional[str] = Field( + description='OpenSearch password', + default=None, + ) + + OPENSEARCH_SECURE: bool = Field( + description='whether to use SSL connection for OpenSearch', + default=False, + ) diff --git a/api/configs/middleware/vdb/oracle_configs.py b/api/configs/middleware/vdb/oracle_configs.py new file mode 100644 index 00000000000000..a70fd3efc2d3f9 --- /dev/null +++ b/api/configs/middleware/vdb/oracle_configs.py @@ -0,0 +1,34 @@ +from typing import Optional + +from pydantic import BaseModel, Field, PositiveInt + + +class OracleConfigs(BaseModel): + """ + ORACLE configs + """ + + ORACLE_HOST: Optional[str] = Field( + description='ORACLE host', + default=None, + ) + + ORACLE_PORT: Optional[PositiveInt] = Field( + description='ORACLE port', + default=None, + ) + + ORACLE_USER: Optional[str] = Field( + description='ORACLE user', + default=None, + ) + + ORACLE_PASSWORD: Optional[str] = Field( + description='ORACLE password', + default=None, + ) + + ORACLE_DATABASE: Optional[str] = Field( + description='ORACLE database', + default=None, + ) diff --git a/api/configs/middleware/vdb/pgvector_configs.py b/api/configs/middleware/vdb/pgvector_configs.py new file mode 100644 index 00000000000000..2c6cf7486b7584 --- /dev/null +++ b/api/configs/middleware/vdb/pgvector_configs.py @@ -0,0 +1,34 @@ +from typing import Optional + +from pydantic import BaseModel, Field, PositiveInt + + +class PGVectorConfigs(BaseModel): + """ + PGVector configs + """ + + PGVECTOR_HOST: Optional[str] = Field( + description='PGVector host', + default=None, + ) + + PGVECTOR_PORT: Optional[PositiveInt] = Field( + description='PGVector port', + default=None, + ) + + PGVECTOR_USER: Optional[str] = Field( + description='PGVector user', + default=None, + ) + + PGVECTOR_PASSWORD: Optional[str] = Field( + description='PGVector password', + default=None, + ) + + PGVECTOR_DATABASE: Optional[str] = Field( + description='PGVector database', + default=None, + ) diff --git a/api/configs/middleware/vdb/pgvectors_configs.py b/api/configs/middleware/vdb/pgvectors_configs.py new file mode 100644 index 00000000000000..e2c8c1f7250fcc --- /dev/null +++ b/api/configs/middleware/vdb/pgvectors_configs.py @@ -0,0 +1,34 @@ +from typing import Optional + +from pydantic import BaseModel, Field, PositiveInt + + +class PGVectoRSConfigs(BaseModel): + """ + PGVectoRS configs + """ + + PGVECTO_RS_HOST: Optional[str] = Field( + description='PGVectoRS host', + default=None, + ) + + PGVECTO_RS_PORT: Optional[PositiveInt] = Field( + description='PGVectoRS port', + default=None, + ) + + PGVECTO_RS_USER: Optional[str] = Field( + description='PGVectoRS user', + default=None, + ) + + PGVECTO_RS_PASSWORD: Optional[str] = Field( + description='PGVectoRS password', + default=None, + ) + + PGVECTO_RS_DATABASE: Optional[str] = Field( + description='PGVectoRS database', + default=None, + ) diff --git a/api/configs/middleware/vdb/qdrant_configs.py b/api/configs/middleware/vdb/qdrant_configs.py new file mode 100644 index 00000000000000..bee49921e6e659 --- /dev/null +++ b/api/configs/middleware/vdb/qdrant_configs.py @@ -0,0 +1,34 @@ +from typing import Optional + +from pydantic import BaseModel, Field, NonNegativeInt, PositiveInt + + +class QdrantConfigs(BaseModel): + """ + Qdrant configs + """ + + QDRANT_URL: Optional[str] = Field( + description='Qdrant url', + default=None, + ) + + 
QDRANT_API_KEY: Optional[str] = Field( + description='Qdrant api key', + default=None, + ) + + QDRANT_CLIENT_TIMEOUT: NonNegativeInt = Field( + description='Qdrant client timeout in seconds', + default=20, + ) + + QDRANT_GRPC_ENABLED: bool = Field( + description='whether enable grpc support for Qdrant connection', + default=False, + ) + + QDRANT_GRPC_PORT: PositiveInt = Field( + description='Qdrant grpc port', + default=6334, + ) diff --git a/api/configs/middleware/vdb/relyt_configs.py b/api/configs/middleware/vdb/relyt_configs.py new file mode 100644 index 00000000000000..621ecfb17307bb --- /dev/null +++ b/api/configs/middleware/vdb/relyt_configs.py @@ -0,0 +1,34 @@ +from typing import Optional + +from pydantic import BaseModel, Field, PositiveInt + + +class RelytConfigs(BaseModel): + """ + Relyt configs + """ + + RELYT_HOST: Optional[str] = Field( + description='Relyt host', + default=None, + ) + + RELYT_PORT: PositiveInt = Field( + description='Relyt port', + default=9200, + ) + + RELYT_USER: Optional[str] = Field( + description='Relyt user', + default=None, + ) + + RELYT_PASSWORD: Optional[str] = Field( + description='Relyt password', + default=None, + ) + + RELYT_DATABASE: Optional[str] = Field( + description='Relyt database', + default='default', + ) diff --git a/api/configs/middleware/vdb/tencent_vector_configs.py b/api/configs/middleware/vdb/tencent_vector_configs.py new file mode 100644 index 00000000000000..7a5418030b566a --- /dev/null +++ b/api/configs/middleware/vdb/tencent_vector_configs.py @@ -0,0 +1,44 @@ +from typing import Optional + +from pydantic import BaseModel, Field, PositiveInt + + +class TencentVectorDBConfigs(BaseModel): + """ + Tencent Vector configs + """ + + TENCENT_VECTOR_DB_URL: Optional[str] = Field( + description='Tencent Vector URL', + default=None, + ) + + TENCENT_VECTOR_DB_API_KEY: Optional[str] = Field( + description='Tencent Vector api key', + default=None, + ) + + TENCENT_VECTOR_DB_TIMEOUT: PositiveInt = Field( + description='Tencent Vector timeout', + default=30, + ) + + TENCENT_VECTOR_DB_USERNAME: Optional[str] = Field( + description='Tencent Vector password', + default=None, + ) + + TENCENT_VECTOR_DB_PASSWORD: Optional[str] = Field( + description='Tencent Vector password', + default=None, + ) + + TENCENT_VECTOR_DB_SHARD: PositiveInt = Field( + description='Tencent Vector sharding number', + default=1, + ) + + TENCENT_VECTOR_DB_REPLICAS: PositiveInt = Field( + description='Tencent Vector replicas', + default=2, + ) diff --git a/api/configs/middleware/vdb/tidb_vector_configs.py b/api/configs/middleware/vdb/tidb_vector_configs.py new file mode 100644 index 00000000000000..5b81f8807bed90 --- /dev/null +++ b/api/configs/middleware/vdb/tidb_vector_configs.py @@ -0,0 +1,34 @@ +from typing import Optional + +from pydantic import BaseModel, Field, PositiveInt + + +class TiDBVectorConfigs(BaseModel): + """ + TiDB Vector configs + """ + + TIDB_VECTOR_HOST: Optional[str] = Field( + description='TiDB Vector host', + default=None, + ) + + TIDB_VECTOR_PORT: Optional[PositiveInt] = Field( + description='TiDB Vector port', + default=None, + ) + + TIDB_VECTOR_USER: Optional[str] = Field( + description='TiDB Vector user', + default=None, + ) + + TIDB_VECTOR_PASSWORD: Optional[str] = Field( + description='TiDB Vector password', + default=None, + ) + + TIDB_VECTOR_DATABASE: Optional[str] = Field( + description='TiDB Vector database', + default=None, + ) diff --git a/api/configs/middleware/vdb/weaviate_configs.py b/api/configs/middleware/vdb/weaviate_configs.py new file 
mode 100644 index 00000000000000..369b631b6f8966 --- /dev/null +++ b/api/configs/middleware/vdb/weaviate_configs.py @@ -0,0 +1,29 @@ +from typing import Optional + +from pydantic import BaseModel, Field, PositiveInt + + +class WeaviateConfigs(BaseModel): + """ + Weaviate configs + """ + + WEAVIATE_ENDPOINT: Optional[str] = Field( + description='Weaviate endpoint URL', + default=None, + ) + + WEAVIATE_API_KEY: Optional[str] = Field( + description='Weaviate API key', + default=None, + ) + + WEAVIATE_GRPC_ENABLED: bool = Field( + description='whether to enable gRPC for Weaviate connection', + default=True, + ) + + WEAVIATE_BATCH_SIZE: PositiveInt = Field( + description='Weaviate batch size', + default=100, + ) diff --git a/api/controllers/console/app/site.py b/api/controllers/console/app/site.py index ff832ac5daebb4..2024db65b29975 100644 --- a/api/controllers/console/app/site.py +++ b/api/controllers/console/app/site.py @@ -28,6 +28,7 @@ def parse_app_site_args(): required=False, location='json') parser.add_argument('prompt_public', type=bool, required=False, location='json') + parser.add_argument('show_workflow_steps', type=bool, required=False, location='json') return parser.parse_args() @@ -59,7 +60,8 @@ def post(self, app_model): 'privacy_policy', 'custom_disclaimer', 'customize_token_strategy', - 'prompt_public' + 'prompt_public', + 'show_workflow_steps' ]: value = args.get(attr_name) if value is not None: diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py index 8a24e584138ba8..67d6dc8e95f718 100644 --- a/api/controllers/console/auth/login.py +++ b/api/controllers/console/auth/login.py @@ -1,3 +1,5 @@ +from typing import cast + import flask_login from flask import current_app, request from flask_restful import Resource, reqparse @@ -5,8 +7,9 @@ import services from controllers.console import api from controllers.console.setup import setup_required -from libs.helper import email +from libs.helper import email, get_remote_ip from libs.password import valid_password +from models.account import Account from services.account_service import AccountService, TenantService @@ -34,10 +37,7 @@ def post(self): if len(tenants) == 0: return {'result': 'fail', 'data': 'workspace not found, please contact system admin to invite you to join in a workspace'} - AccountService.update_last_login(account, request) - - # todo: return the user info - token = AccountService.get_account_jwt_token(account) + token = AccountService.login(account, ip_address=get_remote_ip(request)) return {'result': 'success', 'data': token} @@ -46,6 +46,9 @@ class LogoutApi(Resource): @setup_required def get(self): + account = cast(Account, flask_login.current_user) + token = request.headers.get('Authorization', '').split(' ')[1] + AccountService.logout(account=account, token=token) flask_login.logout_user() return {'result': 'success'} diff --git a/api/controllers/console/auth/oauth.py b/api/controllers/console/auth/oauth.py index e5b80e9a57be33..2e4a627e066033 100644 --- a/api/controllers/console/auth/oauth.py +++ b/api/controllers/console/auth/oauth.py @@ -8,6 +8,7 @@ from constants.languages import languages from extensions.ext_database import db +from libs.helper import get_remote_ip from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo from models.account import Account, AccountStatus from services.account_service import AccountService, RegisterService, TenantService @@ -78,9 +79,7 @@ def get(self, provider: str): TenantService.create_owner_tenant_if_not_exist(account) - 
AccountService.update_last_login(account, request) - - token = AccountService.get_account_jwt_token(account) + token = AccountService.login(account, ip_address=get_remote_ip(request)) return redirect(f'{current_app.config.get("CONSOLE_WEB_URL")}?console_token={token}') diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 4d6aadec8163ac..619ab4f7e2d5af 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -498,7 +498,7 @@ class DatasetRetrievalSettingApi(Resource): def get(self): vector_type = current_app.config['VECTOR_STORE'] match vector_type: - case VectorType.MILVUS | VectorType.RELYT | VectorType.PGVECTOR | VectorType.TIDB_VECTOR | VectorType.CHROMA | VectorType.TENCENT: + case VectorType.MILVUS | VectorType.RELYT | VectorType.PGVECTOR | VectorType.TIDB_VECTOR | VectorType.CHROMA | VectorType.TENCENT | VectorType.ORACLE: return { 'retrieval_method': [ RetrievalMethod.SEMANTIC_SEARCH @@ -522,7 +522,7 @@ class DatasetRetrievalSettingMockApi(Resource): @account_initialization_required def get(self, vector_type): match vector_type: - case VectorType.MILVUS | VectorType.RELYT | VectorType.PGVECTOR | VectorType.TIDB_VECTOR | VectorType.CHROMA | VectorType.TENCEN: + case VectorType.MILVUS | VectorType.RELYT | VectorType.PGVECTOR | VectorType.TIDB_VECTOR | VectorType.CHROMA | VectorType.TENCENT | VectorType.ORACLE: return { 'retrieval_method': [ RetrievalMethod.SEMANTIC_SEARCH diff --git a/api/controllers/console/setup.py b/api/controllers/console/setup.py index 1911559cff527d..a8fdde2791c426 100644 --- a/api/controllers/console/setup.py +++ b/api/controllers/console/setup.py @@ -4,7 +4,7 @@ from flask_restful import Resource, reqparse from extensions.ext_database import db -from libs.helper import email, str_len +from libs.helper import email, get_remote_ip, str_len from libs.password import valid_password from models.model import DifySetup from services.account_service import AccountService, RegisterService, TenantService @@ -61,7 +61,7 @@ def post(self): TenantService.create_owner_tenant_if_not_exist(account) setup() - AccountService.update_last_login(account, request) + AccountService.update_last_login(account, ip_address=get_remote_ip(request)) return {'result': 'success'}, 201 diff --git a/api/controllers/web/site.py b/api/controllers/web/site.py index a084b56b088024..c5c70d810a3a5b 100644 --- a/api/controllers/web/site.py +++ b/api/controllers/web/site.py @@ -33,7 +33,8 @@ class AppSiteApi(WebApiResource): 'privacy_policy': fields.String, 'custom_disclaimer': fields.String, 'default_language': fields.String, - 'prompt_public': fields.Boolean + 'prompt_public': fields.Boolean, + 'show_workflow_steps': fields.Boolean, } app_fields = { diff --git a/api/core/entities/provider_configuration.py b/api/core/entities/provider_configuration.py index 397409622eaede..564dfd8973be2b 100644 --- a/api/core/entities/provider_configuration.py +++ b/api/core/entities/provider_configuration.py @@ -206,8 +206,8 @@ def custom_credentials_validate(self, credentials: dict) -> tuple[Provider, dict credentials[key] = encrypter.decrypt_token(self.tenant_id, original_credentials[key]) credentials = model_provider_factory.provider_credentials_validate( - self.provider.provider, - credentials + provider=self.provider.provider, + credentials=credentials ) for key, value in credentials.items(): diff --git a/api/core/extension/extensible.py b/api/core/extension/extensible.py index 
b10654256475d0..0296126d8b094f 100644 --- a/api/core/extension/extensible.py +++ b/api/core/extension/extensible.py @@ -1,5 +1,5 @@ import enum -import importlib +import importlib.util import json import logging import os @@ -74,6 +74,8 @@ def scan_extensions(cls): # Dynamic loading {subdir_name}.py file and find the subclass of Extensible py_path = os.path.join(subdir_path, extension_name + '.py') spec = importlib.util.spec_from_file_location(extension_name, py_path) + if not spec or not spec.loader: + raise Exception(f"Failed to load module {extension_name} from {py_path}") mod = importlib.util.module_from_spec(spec) spec.loader.exec_module(mod) @@ -108,6 +110,6 @@ def scan_extensions(cls): position=position )) - sorted_extensions = sort_to_dict_by_position_map(position_map, extensions, lambda x: x.name) + sorted_extensions = sort_to_dict_by_position_map(position_map=position_map, data=extensions, name_func=lambda x: x.name) return sorted_extensions diff --git a/api/core/helper/module_import_helper.py b/api/core/helper/module_import_helper.py index d3a4bab4a1e9c1..2000577a406e6f 100644 --- a/api/core/helper/module_import_helper.py +++ b/api/core/helper/module_import_helper.py @@ -5,11 +5,7 @@ from typing import AnyStr -def import_module_from_source( - module_name: str, - py_file_path: AnyStr, - use_lazy_loader: bool = False -) -> ModuleType: +def import_module_from_source(*, module_name: str, py_file_path: AnyStr, use_lazy_loader: bool = False) -> ModuleType: """ Importing a module from the source file directly """ @@ -17,9 +13,13 @@ def import_module_from_source( existed_spec = importlib.util.find_spec(module_name) if existed_spec: spec = existed_spec + if not spec.loader: + raise Exception(f"Failed to load module {module_name} from {py_file_path}") else: # Refer to: https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly spec = importlib.util.spec_from_file_location(module_name, py_file_path) + if not spec or not spec.loader: + raise Exception(f"Failed to load module {module_name} from {py_file_path}") if use_lazy_loader: # Refer to: https://docs.python.org/3/library/importlib.html#implementing-lazy-imports spec.loader = importlib.util.LazyLoader(spec.loader) @@ -29,7 +29,7 @@ def import_module_from_source( spec.loader.exec_module(module) return module except Exception as e: - logging.exception(f'Failed to load module {module_name} from {py_file_path}: {str(e)}') + logging.exception(f"Failed to load module {module_name} from {py_file_path}: {str(e)}") raise e @@ -43,15 +43,14 @@ def get_subclasses_from_module(mod: ModuleType, parent_type: type) -> list[type] def load_single_subclass_from_source( - module_name: str, - script_path: AnyStr, - parent_type: type, - use_lazy_loader: bool = False, + *, module_name: str, script_path: AnyStr, parent_type: type, use_lazy_loader: bool = False ) -> type: """ Load a single subclass from the source """ - module = import_module_from_source(module_name, script_path, use_lazy_loader) + module = import_module_from_source( + module_name=module_name, py_file_path=script_path, use_lazy_loader=use_lazy_loader + ) subclasses = get_subclasses_from_module(module, parent_type) match len(subclasses): case 1: diff --git a/api/core/helper/position_helper.py b/api/core/helper/position_helper.py index 689ab194a7bf86..e4ceeb652e0571 100644 --- a/api/core/helper/position_helper.py +++ b/api/core/helper/position_helper.py @@ -1,15 +1,12 @@ import os from collections import OrderedDict from collections.abc import Callable -from typing import 
Any, AnyStr +from typing import Any from core.tools.utils.yaml_utils import load_yaml_file -def get_position_map( - folder_path: AnyStr, - file_name: str = '_position.yaml', -) -> dict[str, int]: +def get_position_map(folder_path: str, *, file_name: str = "_position.yaml") -> dict[str, int]: """ Get the mapping from name to index from a YAML file :param folder_path: diff --git a/api/core/model_manager.py b/api/core/model_manager.py index 51dff09609472f..e0b6960c232655 100644 --- a/api/core/model_manager.py +++ b/api/core/model_manager.py @@ -1,6 +1,6 @@ import logging import os -from collections.abc import Generator +from collections.abc import Callable, Generator from typing import IO, Optional, Union, cast from core.entities.provider_configuration import ProviderConfiguration, ProviderModelBundle @@ -102,7 +102,7 @@ def _get_load_balancing_manager(self, configuration: ProviderConfiguration, def invoke_llm(self, prompt_messages: list[PromptMessage], model_parameters: Optional[dict] = None, tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None, - stream: bool = True, user: Optional[str] = None, callbacks: list[Callback] = None) \ + stream: bool = True, user: Optional[str] = None, callbacks: Optional[list[Callback]] = None) \ -> Union[LLMResult, Generator]: """ Invoke large language model @@ -291,7 +291,7 @@ def invoke_tts(self, content_text: str, tenant_id: str, voice: str, streaming: b streaming=streaming ) - def _round_robin_invoke(self, function: callable, *args, **kwargs): + def _round_robin_invoke(self, function: Callable, *args, **kwargs): """ Round-robin invoke :param function: function to invoke @@ -437,6 +437,7 @@ def fetch_next(self) -> Optional[ModelLoadBalancingConfiguration]: while True: current_index = redis_client.incr(cache_key) + current_index = cast(int, current_index) if current_index >= 10000000: current_index = 1 redis_client.set(cache_key, current_index) @@ -499,7 +500,10 @@ def in_cooldown(self, config: ModelLoadBalancingConfiguration) -> bool: config.id ) - return redis_client.exists(cooldown_cache_key) + + res = redis_client.exists(cooldown_cache_key) + res = cast(bool, res) + return res @classmethod def get_config_in_cooldown_and_ttl(cls, tenant_id: str, @@ -528,4 +532,5 @@ def get_config_in_cooldown_and_ttl(cls, tenant_id: str, if ttl == -2: return False, 0 + ttl = cast(int, ttl) return True, ttl diff --git a/api/core/model_runtime/entities/provider_entities.py b/api/core/model_runtime/entities/provider_entities.py index f0a3997204be1a..f88f89d5886332 100644 --- a/api/core/model_runtime/entities/provider_entities.py +++ b/api/core/model_runtime/entities/provider_entities.py @@ -1,10 +1,11 @@ +from collections.abc import Sequence from enum import Enum from typing import Optional from pydantic import BaseModel, ConfigDict from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, ModelType, ProviderModel +from core.model_runtime.entities.model_entities import ModelType, ProviderModel class ConfigurateMethod(Enum): @@ -93,8 +94,8 @@ class SimpleProviderEntity(BaseModel): label: I18nObject icon_small: Optional[I18nObject] = None icon_large: Optional[I18nObject] = None - supported_model_types: list[ModelType] - models: list[AIModelEntity] = [] + supported_model_types: Sequence[ModelType] + models: list[ProviderModel] = [] class ProviderHelpEntity(BaseModel): @@ -116,7 +117,7 @@ class ProviderEntity(BaseModel): icon_large: Optional[I18nObject] = None background: 
Optional[str] = None help: Optional[ProviderHelpEntity] = None - supported_model_types: list[ModelType] + supported_model_types: Sequence[ModelType] configurate_methods: list[ConfigurateMethod] models: list[ProviderModel] = [] provider_credential_schema: Optional[ProviderCredentialSchema] = None diff --git a/api/core/model_runtime/model_providers/__base/ai_model.py b/api/core/model_runtime/model_providers/__base/ai_model.py index 83cfffa6116dc3..04b539433c7788 100644 --- a/api/core/model_runtime/model_providers/__base/ai_model.py +++ b/api/core/model_runtime/model_providers/__base/ai_model.py @@ -1,6 +1,7 @@ import decimal import os from abc import ABC, abstractmethod +from collections.abc import Mapping from typing import Optional from pydantic import ConfigDict @@ -26,15 +27,16 @@ class AIModel(ABC): """ Base class for all models. """ + model_type: ModelType - model_schemas: list[AIModelEntity] = None + model_schemas: Optional[list[AIModelEntity]] = None started_at: float = 0 # pydantic configs model_config = ConfigDict(protected_namespaces=()) @abstractmethod - def validate_credentials(self, model: str, credentials: dict) -> None: + def validate_credentials(self, model: str, credentials: Mapping) -> None: """ Validate model credentials @@ -90,8 +92,8 @@ def get_price(self, model: str, credentials: dict, price_type: PriceType, tokens # get price info from predefined model schema price_config: Optional[PriceConfig] = None - if model_schema: - price_config: PriceConfig = model_schema.pricing + if model_schema and model_schema.pricing: + price_config = model_schema.pricing # get unit price unit_price = None @@ -103,13 +105,15 @@ def get_price(self, model: str, credentials: dict, price_type: PriceType, tokens if unit_price is None: return PriceInfo( - unit_price=decimal.Decimal('0.0'), - unit=decimal.Decimal('0.0'), - total_amount=decimal.Decimal('0.0'), + unit_price=decimal.Decimal("0.0"), + unit=decimal.Decimal("0.0"), + total_amount=decimal.Decimal("0.0"), currency="USD", ) # calculate total amount + if not price_config: + raise ValueError(f"Price config not found for model {model}") total_amount = tokens * unit_price * price_config.unit total_amount = total_amount.quantize(decimal.Decimal('0.0000001'), rounding=decimal.ROUND_HALF_UP) @@ -209,7 +213,7 @@ def predefined_models(self) -> list[AIModelEntity]: return model_schemas - def get_model_schema(self, model: str, credentials: Optional[dict] = None) -> Optional[AIModelEntity]: + def get_model_schema(self, model: str, credentials: Optional[Mapping] = None) -> Optional[AIModelEntity]: """ Get model schema by model name and credentials @@ -231,7 +235,7 @@ def get_model_schema(self, model: str, credentials: Optional[dict] = None) -> Op return None - def get_customizable_model_schema_from_credentials(self, model: str, credentials: dict) -> Optional[AIModelEntity]: + def get_customizable_model_schema_from_credentials(self, model: str, credentials: Mapping) -> Optional[AIModelEntity]: """ Get customizable model schema from credentials @@ -240,8 +244,8 @@ def get_customizable_model_schema_from_credentials(self, model: str, credentials :return: model schema """ return self._get_customizable_model_schema(model, credentials) - - def _get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: + + def _get_customizable_model_schema(self, model: str, credentials: Mapping) -> Optional[AIModelEntity]: """ Get customizable model schema and fill in the template """ @@ -249,7 +253,7 @@ def 
_get_customizable_model_schema(self, model: str, credentials: dict) -> Optio if not schema: return None - + # fill in the template new_parameter_rules = [] for parameter_rule in schema.parameter_rules: @@ -271,10 +275,20 @@ def _get_customizable_model_schema(self, model: str, credentials: dict) -> Optio parameter_rule.help = I18nObject( en_US=default_parameter_rule['help']['en_US'], ) - if not parameter_rule.help.en_US and ('help' in default_parameter_rule and 'en_US' in default_parameter_rule['help']): - parameter_rule.help.en_US = default_parameter_rule['help']['en_US'] - if not parameter_rule.help.zh_Hans and ('help' in default_parameter_rule and 'zh_Hans' in default_parameter_rule['help']): - parameter_rule.help.zh_Hans = default_parameter_rule['help'].get('zh_Hans', default_parameter_rule['help']['en_US']) + if ( + parameter_rule.help + and not parameter_rule.help.en_US + and ("help" in default_parameter_rule and "en_US" in default_parameter_rule["help"]) + ): + parameter_rule.help.en_US = default_parameter_rule["help"]["en_US"] + if ( + parameter_rule.help + and not parameter_rule.help.zh_Hans + and ("help" in default_parameter_rule and "zh_Hans" in default_parameter_rule["help"]) + ): + parameter_rule.help.zh_Hans = default_parameter_rule["help"].get( + "zh_Hans", default_parameter_rule["help"]["en_US"] + ) except ValueError: pass @@ -284,7 +298,7 @@ def _get_customizable_model_schema(self, model: str, credentials: dict) -> Optio return schema - def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: + def get_customizable_model_schema(self, model: str, credentials: Mapping) -> Optional[AIModelEntity]: """ Get customizable model schema @@ -304,7 +318,7 @@ def _get_default_parameter_rule_variable_map(self, name: DefaultParameterName) - default_parameter_rule = PARAMETER_RULE_TEMPLATE.get(name) if not default_parameter_rule: - raise Exception(f'Invalid model parameter rule name {name}') + raise Exception(f"Invalid model parameter rule name {name}") return default_parameter_rule @@ -318,4 +332,4 @@ def _get_num_tokens_by_gpt2(self, text: str) -> int: :param text: plain text of prompt. 
You need to convert the original message to plain text :return: number of tokens """ - return GPT2Tokenizer.get_num_tokens(text) \ No newline at end of file + return GPT2Tokenizer.get_num_tokens(text) diff --git a/api/core/model_runtime/model_providers/__base/large_language_model.py b/api/core/model_runtime/model_providers/__base/large_language_model.py index ef633c61cd01da..02ba0c9410f937 100644 --- a/api/core/model_runtime/model_providers/__base/large_language_model.py +++ b/api/core/model_runtime/model_providers/__base/large_language_model.py @@ -3,7 +3,7 @@ import re import time from abc import abstractmethod -from collections.abc import Generator +from collections.abc import Generator, Mapping from typing import Optional, Union from pydantic import ConfigDict @@ -43,7 +43,7 @@ class LargeLanguageModel(AIModel): def invoke(self, model: str, credentials: dict, prompt_messages: list[PromptMessage], model_parameters: Optional[dict] = None, tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None, - stream: bool = True, user: Optional[str] = None, callbacks: list[Callback] = None) \ + stream: bool = True, user: Optional[str] = None, callbacks: Optional[list[Callback]] = None) \ -> Union[LLMResult, Generator]: """ Invoke large language model @@ -129,7 +129,7 @@ def invoke(self, model: str, credentials: dict, user=user, callbacks=callbacks ) - else: + elif isinstance(result, LLMResult): self._trigger_after_invoke_callbacks( model=model, result=result, @@ -148,7 +148,7 @@ def invoke(self, model: str, credentials: dict, def _code_block_mode_wrapper(self, model: str, credentials: dict, prompt_messages: list[PromptMessage], model_parameters: dict, tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None, stream: bool = True, user: Optional[str] = None, - callbacks: list[Callback] = None) -> Union[LLMResult, Generator]: + callbacks: Optional[list[Callback]] = None) -> Union[LLMResult, Generator]: """ Code block mode wrapper, ensure the response is a code block with output markdown quote @@ -196,7 +196,7 @@ def _code_block_mode_wrapper(self, model: str, credentials: dict, prompt_message # override the system message prompt_messages[0] = SystemPromptMessage( content=block_prompts - .replace("{{instructions}}", prompt_messages[0].content) + .replace("{{instructions}}", str(prompt_messages[0].content)) ) else: # insert the system message @@ -274,8 +274,9 @@ def _code_block_mode_stream_processor(self, model: str, prompt_messages: list[Pr else: yield piece continue - new_piece = "" + new_piece: str = "" for char in piece: + char = str(char) if state == "normal": if char == "`": state = "in_backticks" @@ -340,7 +341,7 @@ def _code_block_mode_stream_processor_with_backtick(self, model: str, prompt_mes if state == "done": continue - new_piece = "" + new_piece: str = "" for char in piece: if state == "search_start": if char == "`": @@ -365,7 +366,7 @@ def _code_block_mode_stream_processor_with_backtick(self, model: str, prompt_mes # If backticks were counted but we're still collecting content, it was a false start new_piece += "`" * backtick_count backtick_count = 0 - new_piece += char + new_piece += str(char) elif state == "done": break @@ -388,13 +389,14 @@ def _invoke_result_generator(self, model: str, result: Generator, credentials: d prompt_messages: list[PromptMessage], model_parameters: dict, tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None, stream: bool = True, - user: Optional[str] = None, callbacks: list[Callback] 
= None) -> Generator: + user: Optional[str] = None, callbacks: Optional[list[Callback]] = None) -> Generator: """ Invoke result generator :param result: result generator :return: result generator """ + callbacks = callbacks or [] prompt_message = AssistantPromptMessage( content="" ) @@ -487,36 +489,6 @@ def enforce_stop_tokens(self, text: str, stop: list[str]) -> str: """Cut off the text as soon as any stop words occur.""" return re.split("|".join(stop), text, maxsplit=1)[0] - def _llm_result_to_stream(self, result: LLMResult) -> Generator: - """ - Transform llm result to stream - - :param result: llm result - :return: stream - """ - index = 0 - - tool_calls = result.message.tool_calls - - for word in result.message.content: - assistant_prompt_message = AssistantPromptMessage( - content=word, - tool_calls=tool_calls if index == (len(result.message.content) - 1) else [] - ) - - yield LLMResultChunk( - model=result.model, - prompt_messages=result.prompt_messages, - system_fingerprint=result.system_fingerprint, - delta=LLMResultChunkDelta( - index=index, - message=assistant_prompt_message, - ) - ) - - index += 1 - time.sleep(0.01) - def get_parameter_rules(self, model: str, credentials: dict) -> list[ParameterRule]: """ Get parameter rules @@ -531,7 +503,7 @@ def get_parameter_rules(self, model: str, credentials: dict) -> list[ParameterRu return [] - def get_model_mode(self, model: str, credentials: Optional[dict] = None) -> LLMMode: + def get_model_mode(self, model: str, credentials: Optional[Mapping] = None) -> LLMMode: """ Get model mode @@ -595,7 +567,7 @@ def _trigger_before_invoke_callbacks(self, model: str, credentials: dict, prompt_messages: list[PromptMessage], model_parameters: dict, tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None, stream: bool = True, - user: Optional[str] = None, callbacks: list[Callback] = None) -> None: + user: Optional[str] = None, callbacks: Optional[list[Callback]] = None) -> None: """ Trigger before invoke callbacks @@ -633,7 +605,7 @@ def _trigger_new_chunk_callbacks(self, chunk: LLMResultChunk, model: str, creden prompt_messages: list[PromptMessage], model_parameters: dict, tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None, stream: bool = True, - user: Optional[str] = None, callbacks: list[Callback] = None) -> None: + user: Optional[str] = None, callbacks: Optional[list[Callback]] = None) -> None: """ Trigger new chunk callbacks @@ -672,7 +644,7 @@ def _trigger_after_invoke_callbacks(self, model: str, result: LLMResult, credent prompt_messages: list[PromptMessage], model_parameters: dict, tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None, stream: bool = True, - user: Optional[str] = None, callbacks: list[Callback] = None) -> None: + user: Optional[str] = None, callbacks: Optional[list[Callback]] = None) -> None: """ Trigger after invoke callbacks @@ -712,7 +684,7 @@ def _trigger_invoke_error_callbacks(self, model: str, ex: Exception, credentials prompt_messages: list[PromptMessage], model_parameters: dict, tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None, stream: bool = True, - user: Optional[str] = None, callbacks: list[Callback] = None) -> None: + user: Optional[str] = None, callbacks: Optional[list[Callback]] = None) -> None: """ Trigger invoke error callbacks diff --git a/api/core/model_runtime/model_providers/__base/model_provider.py b/api/core/model_runtime/model_providers/__base/model_provider.py index 
a893d023c0452d..51dd3b7e28b776 100644 --- a/api/core/model_runtime/model_providers/__base/model_provider.py +++ b/api/core/model_runtime/model_providers/__base/model_provider.py @@ -1,5 +1,6 @@ import os from abc import ABC, abstractmethod +from typing import Optional from core.helper.module_import_helper import get_subclasses_from_module, import_module_from_source from core.model_runtime.entities.model_entities import AIModelEntity, ModelType @@ -9,7 +10,7 @@ class ModelProvider(ABC): - provider_schema: ProviderEntity = None + provider_schema: Optional[ProviderEntity] = None model_instance_map: dict[str, AIModel] = {} @abstractmethod @@ -28,23 +29,23 @@ def validate_provider_credentials(self, credentials: dict) -> None: def get_provider_schema(self) -> ProviderEntity: """ Get provider schema - + :return: provider schema """ if self.provider_schema: return self.provider_schema - + # get dirname of the current path provider_name = self.__class__.__module__.split('.')[-1] # get the path of the model_provider classes base_path = os.path.abspath(__file__) current_path = os.path.join(os.path.dirname(os.path.dirname(base_path)), provider_name) - + # read provider schema from yaml file yaml_path = os.path.join(current_path, f'{provider_name}.yaml') yaml_data = load_yaml_file(yaml_path, ignore_error=True) - + try: # yaml_data to entity provider_schema = ProviderEntity(**yaml_data) @@ -53,7 +54,7 @@ def get_provider_schema(self) -> ProviderEntity: # cache schema self.provider_schema = provider_schema - + return provider_schema def models(self, model_type: ModelType) -> list[AIModelEntity]: @@ -84,7 +85,7 @@ def get_model_instance(self, model_type: ModelType) -> AIModel: :return: """ # get dirname of the current path - provider_name = self.__class__.__module__.split('.')[-1] + provider_name = self.__class__.__module__.split(".")[-1] if f"{provider_name}.{model_type.value}" in self.model_instance_map: return self.model_instance_map[f"{provider_name}.{model_type.value}"] @@ -101,11 +102,17 @@ def get_model_instance(self, model_type: ModelType) -> AIModel: # Dynamic loading {model_type_name}.py file and find the subclass of AIModel parent_module = '.'.join(self.__class__.__module__.split('.')[:-1]) mod = import_module_from_source( - f'{parent_module}.{model_type_name}.{model_type_name}', model_type_py_path) - model_class = next(filter(lambda x: x.__module__ == mod.__name__ and not x.__abstractmethods__, - get_subclasses_from_module(mod, AIModel)), None) + module_name=f"{parent_module}.{model_type_name}.{model_type_name}", py_file_path=model_type_py_path + ) + model_class = next( + filter( + lambda x: x.__module__ == mod.__name__ and not x.__abstractmethods__, + get_subclasses_from_module(mod, AIModel), + ), + None, + ) if not model_class: - raise Exception(f'Missing AIModel Class for model type {model_type} in {model_type_py_path}') + raise Exception(f"Missing AIModel Class for model type {model_type} in {model_type_py_path}") model_instance_map = model_class() self.model_instance_map[f"{provider_name}.{model_type.value}"] = model_instance_map diff --git a/api/core/model_runtime/model_providers/anthropic/llm/_position.yaml b/api/core/model_runtime/model_providers/anthropic/llm/_position.yaml index e7b002878a5d77..8394c4276a786e 100644 --- a/api/core/model_runtime/model_providers/anthropic/llm/_position.yaml +++ b/api/core/model_runtime/model_providers/anthropic/llm/_position.yaml @@ -1,3 +1,5 @@ +- claude-3-5-sonnet-20240620 +- claude-3-haiku-20240307 - claude-3-opus-20240229 - claude-3-sonnet-20240229 
- claude-2.1 diff --git a/api/core/model_runtime/model_providers/anthropic/llm/claude-3-5-sonnet-20240620.yaml b/api/core/model_runtime/model_providers/anthropic/llm/claude-3-5-sonnet-20240620.yaml new file mode 100644 index 00000000000000..72d4d8545b9723 --- /dev/null +++ b/api/core/model_runtime/model_providers/anthropic/llm/claude-3-5-sonnet-20240620.yaml @@ -0,0 +1,39 @@ +model: claude-3-5-sonnet-20240620 +label: + en_US: claude-3-5-sonnet-20240620 +model_type: llm +features: + - agent-thought + - vision + - tool-call + - stream-tool-call +model_properties: + mode: chat + context_size: 200000 +parameter_rules: + - name: temperature + use_template: temperature + - name: top_p + use_template: top_p + - name: top_k + label: + zh_Hans: 取样数量 + en_US: Top k + type: int + help: + zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 + en_US: Only sample from the top K options for each subsequent token. + required: false + - name: max_tokens + use_template: max_tokens + required: true + default: 4096 + min: 1 + max: 4096 + - name: response_format + use_template: response_format +pricing: + input: '3.00' + output: '15.00' + unit: '0.000001' + currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-sonnet-v1.5.yaml b/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-sonnet-v1.5.yaml new file mode 100644 index 00000000000000..2ae7b8ffaa6163 --- /dev/null +++ b/api/core/model_runtime/model_providers/bedrock/llm/anthropic.claude-3-sonnet-v1.5.yaml @@ -0,0 +1,56 @@ +model: anthropic.claude-3-5-sonnet-20240620-v1:0 +label: + en_US: Claude 3.5 Sonnet +model_type: llm +features: + - agent-thought + - vision +model_properties: + mode: chat + context_size: 200000 +# docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html +parameter_rules: + - name: max_tokens + use_template: max_tokens + required: true + type: int + default: 4096 + min: 1 + max: 4096 + help: + zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 + en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. + - name: temperature + use_template: temperature + required: false + type: float + default: 1 + min: 0.0 + max: 1.0 + help: + zh_Hans: 生成内容的随机性。 + en_US: The amount of randomness injected into the response. + - name: top_p + required: false + type: float + default: 0.999 + min: 0.000 + max: 1.000 + help: + zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 + en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. + - name: top_k + required: false + type: int + default: 0 + min: 0 + # tip docs from aws has error, max value is 500 + max: 500 + help: + zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 + en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. 
+pricing: + input: '0.003' + output: '0.015' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/model_provider_factory.py b/api/core/model_runtime/model_providers/model_provider_factory.py index a4dbaabfc9827b..b1660afafb12e4 100644 --- a/api/core/model_runtime/model_providers/model_provider_factory.py +++ b/api/core/model_runtime/model_providers/model_provider_factory.py @@ -1,5 +1,6 @@ import logging import os +from collections.abc import Sequence from typing import Optional from pydantic import BaseModel, ConfigDict @@ -16,20 +17,21 @@ class ModelProviderExtension(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + provider_instance: ModelProvider name: str position: Optional[int] = None - model_config = ConfigDict(arbitrary_types_allowed=True) class ModelProviderFactory: - model_provider_extensions: dict[str, ModelProviderExtension] = None + model_provider_extensions: Optional[dict[str, ModelProviderExtension]] = None def __init__(self) -> None: # for cache in memory self.get_providers() - def get_providers(self) -> list[ProviderEntity]: + def get_providers(self) -> Sequence[ProviderEntity]: """ Get all providers :return: list of providers @@ -39,7 +41,7 @@ def get_providers(self) -> list[ProviderEntity]: # traverse all model_provider_extensions providers = [] - for name, model_provider_extension in model_provider_extensions.items(): + for model_provider_extension in model_provider_extensions.values(): # get model_provider instance model_provider_instance = model_provider_extension.provider_instance @@ -57,7 +59,7 @@ def get_providers(self) -> list[ProviderEntity]: # return providers return providers - def provider_credentials_validate(self, provider: str, credentials: dict) -> dict: + def provider_credentials_validate(self, *, provider: str, credentials: dict) -> dict: """ Validate provider credentials @@ -74,6 +76,9 @@ def provider_credentials_validate(self, provider: str, credentials: dict) -> dic # get provider_credential_schema and validate credentials according to the rules provider_credential_schema = provider_schema.provider_credential_schema + if not provider_credential_schema: + raise ValueError(f"Provider {provider} does not have provider_credential_schema") + # validate provider credential schema validator = ProviderCredentialSchemaValidator(provider_credential_schema) filtered_credentials = validator.validate_and_filter(credentials) @@ -83,8 +88,9 @@ def provider_credentials_validate(self, provider: str, credentials: dict) -> dic return filtered_credentials - def model_credentials_validate(self, provider: str, model_type: ModelType, - model: str, credentials: dict) -> dict: + def model_credentials_validate( + self, *, provider: str, model_type: ModelType, model: str, credentials: dict + ) -> dict: """ Validate model credentials @@ -103,6 +109,9 @@ def model_credentials_validate(self, provider: str, model_type: ModelType, # get model_credential_schema and validate credentials according to the rules model_credential_schema = provider_schema.model_credential_schema + if not model_credential_schema: + raise ValueError(f"Provider {provider} does not have model_credential_schema") + # validate model credential schema validator = ModelCredentialSchemaValidator(model_type, model_credential_schema) filtered_credentials = validator.validate_and_filter(credentials) @@ -115,11 +124,13 @@ def model_credentials_validate(self, provider: str, model_type: ModelType, return filtered_credentials - def get_models(self, - provider: 
Optional[str] = None, - model_type: Optional[ModelType] = None, - provider_configs: Optional[list[ProviderConfig]] = None) \ - -> list[SimpleProviderEntity]: + def get_models( + self, + *, + provider: Optional[str] = None, + model_type: Optional[ModelType] = None, + provider_configs: Optional[list[ProviderConfig]] = None, + ) -> list[SimpleProviderEntity]: """ Get all models for given model type @@ -128,6 +139,8 @@ def get_models(self, :param provider_configs: list of provider configs :return: list of models """ + provider_configs = provider_configs or [] + # scan all providers model_provider_extensions = self._get_model_provider_map() @@ -184,7 +197,7 @@ def get_provider_instance(self, provider: str) -> ModelProvider: # get the provider extension model_provider_extension = model_provider_extensions.get(provider) if not model_provider_extension: - raise Exception(f'Invalid provider: {provider}') + raise Exception(f"Invalid provider: {provider}") # get the provider instance model_provider_instance = model_provider_extension.provider_instance @@ -192,10 +205,22 @@ def get_provider_instance(self, provider: str) -> ModelProvider: return model_provider_instance def _get_model_provider_map(self) -> dict[str, ModelProviderExtension]: + """ + Retrieves the model provider map. + + This method retrieves the model provider map, which is a dictionary containing the model provider names as keys + and instances of `ModelProviderExtension` as values. The model provider map is used to store information about + available model providers. + + Returns: + A dictionary containing the model provider map. + + Raises: + None. + """ if self.model_provider_extensions: return self.model_provider_extensions - # get the path of current classes current_path = os.path.abspath(__file__) model_providers_path = os.path.dirname(current_path) @@ -204,8 +229,8 @@ def _get_model_provider_map(self) -> dict[str, ModelProviderExtension]: model_provider_dir_paths = [ os.path.join(model_providers_path, model_provider_dir) for model_provider_dir in os.listdir(model_providers_path) - if not model_provider_dir.startswith('__') - and os.path.isdir(os.path.join(model_providers_path, model_provider_dir)) + if not model_provider_dir.startswith("__") + and os.path.isdir(os.path.join(model_providers_path, model_provider_dir)) ] # get _position.yaml file path @@ -219,30 +244,33 @@ def _get_model_provider_map(self) -> dict[str, ModelProviderExtension]: file_names = os.listdir(model_provider_dir_path) - if (model_provider_name + '.py') not in file_names: + if (model_provider_name + ".py") not in file_names: logger.warning(f"Missing {model_provider_name}.py file in {model_provider_dir_path}, Skip.") continue # Dynamic loading {model_provider_name}.py file and find the subclass of ModelProvider - py_path = os.path.join(model_provider_dir_path, model_provider_name + '.py') + py_path = os.path.join(model_provider_dir_path, model_provider_name + ".py") model_provider_class = load_single_subclass_from_source( - module_name=f'core.model_runtime.model_providers.{model_provider_name}.{model_provider_name}', + module_name=f"core.model_runtime.model_providers.{model_provider_name}.{model_provider_name}", script_path=py_path, - parent_type=ModelProvider) + parent_type=ModelProvider, + ) if not model_provider_class: logger.warning(f"Missing Model Provider Class that extends ModelProvider in {py_path}, Skip.") continue - if f'{model_provider_name}.yaml' not in file_names: + if f"{model_provider_name}.yaml" not in file_names: logger.warning(f"Missing 
{model_provider_name}.yaml file in {model_provider_dir_path}, Skip.") continue - model_providers.append(ModelProviderExtension( - name=model_provider_name, - provider_instance=model_provider_class(), - position=position_map.get(model_provider_name) - )) + model_providers.append( + ModelProviderExtension( + name=model_provider_name, + provider_instance=model_provider_class(), + position=position_map.get(model_provider_name), + ) + ) sorted_extensions = sort_to_dict_by_position_map(position_map, model_providers, lambda x: x.name) diff --git a/api/core/model_runtime/model_providers/openai/_common.py b/api/core/model_runtime/model_providers/openai/_common.py index 5772f325e1ac28..467a51daf2a278 100644 --- a/api/core/model_runtime/model_providers/openai/_common.py +++ b/api/core/model_runtime/model_providers/openai/_common.py @@ -1,3 +1,5 @@ +from collections.abc import Mapping + import openai from httpx import Timeout @@ -12,7 +14,7 @@ class _CommonOpenAI: - def _to_credential_kwargs(self, credentials: dict) -> dict: + def _to_credential_kwargs(self, credentials: Mapping) -> dict: """ Transform credentials to kwargs for model instance @@ -25,9 +27,9 @@ def _to_credential_kwargs(self, credentials: dict) -> dict: "max_retries": 1, } - if credentials.get('openai_api_base'): - credentials['openai_api_base'] = credentials['openai_api_base'].rstrip('/') - credentials_kwargs['base_url'] = credentials['openai_api_base'] + '/v1' + if credentials.get("openai_api_base"): + openai_api_base = credentials["openai_api_base"].rstrip("/") + credentials_kwargs["base_url"] = openai_api_base + "/v1" if 'openai_organization' in credentials: credentials_kwargs['organization'] = credentials['openai_organization'] @@ -45,24 +47,14 @@ def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]] :return: Invoke error mapping """ return { - InvokeConnectionError: [ - openai.APIConnectionError, - openai.APITimeoutError - ], - InvokeServerUnavailableError: [ - openai.InternalServerError - ], - InvokeRateLimitError: [ - openai.RateLimitError - ], - InvokeAuthorizationError: [ - openai.AuthenticationError, - openai.PermissionDeniedError - ], + InvokeConnectionError: [openai.APIConnectionError, openai.APITimeoutError], + InvokeServerUnavailableError: [openai.InternalServerError], + InvokeRateLimitError: [openai.RateLimitError], + InvokeAuthorizationError: [openai.AuthenticationError, openai.PermissionDeniedError], InvokeBadRequestError: [ openai.BadRequestError, openai.NotFoundError, openai.UnprocessableEntityError, - openai.APIError - ] + openai.APIError, + ], } diff --git a/api/core/model_runtime/model_providers/openai/openai.py b/api/core/model_runtime/model_providers/openai/openai.py index d4a4e24c9790d7..66efd4797f621a 100644 --- a/api/core/model_runtime/model_providers/openai/openai.py +++ b/api/core/model_runtime/model_providers/openai/openai.py @@ -1,4 +1,5 @@ import logging +from collections.abc import Mapping from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.errors.validate import CredentialsValidateFailedError @@ -9,7 +10,7 @@ class OpenAIProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: + def validate_provider_credentials(self, credentials: Mapping) -> None: """ Validate provider credentials if validate failed, raise exception diff --git a/api/core/model_runtime/model_providers/tongyi/llm/llm.py b/api/core/model_runtime/model_providers/tongyi/llm/llm.py index 3d0a80144c6ddc..41d8f37aaf8477 100644 --- 
a/api/core/model_runtime/model_providers/tongyi/llm/llm.py +++ b/api/core/model_runtime/model_providers/tongyi/llm/llm.py @@ -18,7 +18,7 @@ ) from core.model_runtime.callbacks.base_callback import Callback -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta +from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, ImagePromptMessageContent, @@ -82,6 +82,7 @@ def _code_block_mode_wrapper(self, model: str, credentials: dict, {{instructions}} +You should also complete the text started with ``` but not tell ``` directly. """ code_block = model_parameters.get("response_format", "") @@ -113,21 +114,17 @@ def _code_block_mode_wrapper(self, model: str, credentials: dict, # insert the system message prompt_messages.insert(0, SystemPromptMessage( content=block_prompts - .replace("{{instructions}}", f"Please output a valid {code_block} object.") + .replace("{{instructions}}", f"Please output a valid {code_block} with markdown codeblocks.") )) - mode = self.get_model_mode(model, credentials) - if mode == LLMMode.CHAT: - if len(prompt_messages) > 0 and isinstance(prompt_messages[-1], UserPromptMessage): - # add ```JSON\n to the last message - prompt_messages[-1].content += f"\n```{code_block}\n" - else: - # append a user message - prompt_messages.append(UserPromptMessage( - content=f"```{code_block}\n" - )) + if len(prompt_messages) > 0 and isinstance(prompt_messages[-1], UserPromptMessage): + # add ```JSON\n to the last message + prompt_messages[-1].content += f"\n```{code_block}\n" else: - prompt_messages.append(AssistantPromptMessage(content=f"```{code_block}\n")) + # append a user message + prompt_messages.append(UserPromptMessage( + content=f"```{code_block}\n" + )) response = self._invoke( model=model, @@ -243,11 +240,8 @@ def _generate(self, model: str, credentials: dict, response = MultiModalConversation.call(**params, stream=stream) else: - if mode == LLMMode.CHAT: - params['messages'] = self._convert_prompt_messages_to_tongyi_messages(prompt_messages) - else: - params['prompt'] = prompt_messages[0].content.rstrip() - + # nothing different between chat model and completion model in tongyi + params['messages'] = self._convert_prompt_messages_to_tongyi_messages(prompt_messages) response = Generation.call(**params, result_format='message', stream=stream) diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-chat.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-chat.yaml index ae3ec0fc040a2f..5681f5c7b06665 100644 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-chat.yaml +++ b/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus-chat.yaml @@ -22,9 +22,9 @@ parameter_rules: - name: max_tokens use_template: max_tokens type: int - default: 1500 + default: 2000 min: 1 - max: 1500 + max: 2000 help: zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus.yaml index bfa04792a0c642..71dabb55f07fd8 100644 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus.yaml +++ b/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus.yaml @@ -20,9 +20,9 @@ parameter_rules: - name: max_tokens use_template: max_tokens type: int - default: 1500 + default: 2000 min: 1 - max: 1500 + max: 2000 help: zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3.5-sonnet.yaml b/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3.5-sonnet.yaml new file mode 100644 index 00000000000000..c64384e6a2f153 --- /dev/null +++ b/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3.5-sonnet.yaml @@ -0,0 +1,55 @@ +model: claude-3-5-sonnet@20240620 +label: + en_US: Claude 3.5 Sonnet +model_type: llm +features: + - agent-thought + - vision +model_properties: + mode: chat + context_size: 200000 +parameter_rules: + - name: max_tokens + use_template: max_tokens + required: true + type: int + default: 4096 + min: 1 + max: 4096 + help: + zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 + en_US: The maximum number of tokens to generate before stopping. Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. + - name: temperature + use_template: temperature + required: false + type: float + default: 1 + min: 0.0 + max: 1.0 + help: + zh_Hans: 生成内容的随机性。 + en_US: The amount of randomness injected into the response. + - name: top_p + required: false + type: float + default: 0.999 + min: 0.000 + max: 1.000 + help: + zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 + en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. + - name: top_k + required: false + type: int + default: 0 + min: 0 + # tip docs from aws has error, max value is 500 + max: 500 + help: + zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 + en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. 
+pricing: + input: '0.003' + output: '0.015' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/llm.py b/api/core/model_runtime/model_providers/vertex_ai/llm/llm.py index 6b2258f873789d..804c3535fbdb7e 100644 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/llm.py +++ b/api/core/model_runtime/model_providers/vertex_ai/llm/llm.py @@ -100,7 +100,7 @@ def _generate_anthropic(self, model: str, credentials: dict, prompt_messages: li token = credentials.token # Vertex AI Anthropic Claude3 Opus model available in us-east5 region, Sonnet and Haiku available in us-central1 region - if 'opus' in model: + if 'opus' in model or 'claude-3-5-sonnet' in model: location = 'us-east5' else: location = 'us-central1' diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/llm.py b/api/core/model_runtime/model_providers/zhipuai/llm/llm.py index ee09b8cb742a5d..ff971964a8603e 100644 --- a/api/core/model_runtime/model_providers/zhipuai/llm/llm.py +++ b/api/core/model_runtime/model_providers/zhipuai/llm/llm.py @@ -10,6 +10,7 @@ PromptMessageRole, PromptMessageTool, SystemPromptMessage, + ToolPromptMessage, UserPromptMessage, ) from core.model_runtime.errors.validate import CredentialsValidateFailedError @@ -463,6 +464,8 @@ def _convert_one_message_to_text(self, message: PromptMessage) -> str: message_text = f"{ai_prompt} {content}" elif isinstance(message, SystemPromptMessage): message_text = content + elif isinstance(message, ToolPromptMessage): + message_text = content else: raise ValueError(f"Got unknown type {message}") diff --git a/api/core/rag/datasource/vdb/milvus/milvus_vector.py b/api/core/rag/datasource/vdb/milvus/milvus_vector.py index 4a0961e969511b..665a697e1ab265 100644 --- a/api/core/rag/datasource/vdb/milvus/milvus_vector.py +++ b/api/core/rag/datasource/vdb/milvus/milvus_vector.py @@ -6,7 +6,6 @@ from flask import current_app from pydantic import BaseModel, model_validator from pymilvus import MilvusClient, MilvusException, connections -from pymilvus.milvus_client import IndexParams from core.rag.datasource.entity.embedding import Embeddings from core.rag.datasource.vdb.field import Field @@ -255,15 +254,11 @@ def create_collection( # Since primary field is auto-id, no need to track it self._fields.remove(Field.PRIMARY_KEY.value) - # Create Index params for the collection - index_params_obj = IndexParams() - index_params_obj.add_index(field_name=Field.VECTOR.value, **index_params) - # Create the collection collection_name = self._collection_name - self._client.create_collection(collection_name=collection_name, - schema=schema, index_params=index_params_obj, - consistency_level=self._consistency_level) + self._client.create_collection_with_schema(collection_name=collection_name, + schema=schema, index_param=index_params, + consistency_level=self._consistency_level) redis_client.set(collection_exist_cache_key, 1, ex=3600) def _init_client(self, config) -> MilvusClient: diff --git a/api/core/rag/datasource/vdb/oracle/__init__.py b/api/core/rag/datasource/vdb/oracle/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/api/core/rag/datasource/vdb/oracle/oraclevector.py b/api/core/rag/datasource/vdb/oracle/oraclevector.py new file mode 100644 index 00000000000000..c087ed0cd82290 --- /dev/null +++ b/api/core/rag/datasource/vdb/oracle/oraclevector.py @@ -0,0 +1,239 @@ +import array +import json +import uuid +from contextlib import contextmanager +from typing import Any + +import numpy +import oracledb +from flask import
current_app +from pydantic import BaseModel, model_validator + +from core.rag.datasource.entity.embedding import Embeddings +from core.rag.datasource.vdb.vector_base import BaseVector +from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory +from core.rag.datasource.vdb.vector_type import VectorType +from core.rag.models.document import Document +from extensions.ext_redis import redis_client +from models.dataset import Dataset + +oracledb.defaults.fetch_lobs = False + + +class OracleVectorConfig(BaseModel): + host: str + port: int + user: str + password: str + database: str + + @model_validator(mode='before') + def validate_config(cls, values: dict) -> dict: + if not values["host"]: + raise ValueError("config ORACLE_HOST is required") + if not values["port"]: + raise ValueError("config ORACLE_PORT is required") + if not values["user"]: + raise ValueError("config ORACLE_USER is required") + if not values["password"]: + raise ValueError("config ORACLE_PASSWORD is required") + if not values["database"]: + raise ValueError("config ORACLE_DB is required") + return values + + +SQL_CREATE_TABLE = """ +CREATE TABLE IF NOT EXISTS {table_name} ( + id varchar2(100) + ,text CLOB NOT NULL + ,meta JSON + ,embedding vector NOT NULL +) +""" + + +class OracleVector(BaseVector): + def __init__(self, collection_name: str, config: OracleVectorConfig): + super().__init__(collection_name) + self.pool = self._create_connection_pool(config) + self.table_name = f"embedding_{collection_name}" + + def get_type(self) -> str: + return VectorType.ORACLE + + def numpy_converter_in(self, value): + if value.dtype == numpy.float64: + dtype = "d" + elif value.dtype == numpy.float32: + dtype = "f" + else: + dtype = "b" + return array.array(dtype, value) + + def input_type_handler(self, cursor, value, arraysize): + if isinstance(value, numpy.ndarray): + return cursor.var( + oracledb.DB_TYPE_VECTOR, + arraysize=arraysize, + inconverter=self.numpy_converter_in, + ) + + def numpy_converter_out(self, value): + if value.typecode == "b": + dtype = numpy.int8 + elif value.typecode == "f": + dtype = numpy.float32 + else: + dtype = numpy.float64 + return numpy.array(value, copy=False, dtype=dtype) + + def output_type_handler(self, cursor, metadata): + if metadata.type_code is oracledb.DB_TYPE_VECTOR: + return cursor.var( + metadata.type_code, + arraysize=cursor.arraysize, + outconverter=self.numpy_converter_out, + ) + def _create_connection_pool(self, config: OracleVectorConfig): + return oracledb.create_pool(user=config.user, password=config.password, dsn="{}:{}/{}".format(config.host, config.port, config.database), min=1, max=50, increment=1) + + + @contextmanager + def _get_cursor(self): + conn = self.pool.acquire() + conn.inputtypehandler = self.input_type_handler + conn.outputtypehandler = self.output_type_handler + cur = conn.cursor() + try: + yield cur + finally: + cur.close() + conn.commit() + conn.close() + + def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs): + dimension = len(embeddings[0]) + self._create_collection(dimension) + return self.add_texts(texts, embeddings) + + def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): + values = [] + pks = [] + for i, doc in enumerate(documents): + doc_id = doc.metadata.get("doc_id", str(uuid.uuid4())) + pks.append(doc_id) + values.append( + ( + doc_id, + doc.page_content, + json.dumps(doc.metadata), + #array.array("f", embeddings[i]), + numpy.array(embeddings[i]), + ) + ) + #print(f"INSERT INTO 
{self.table_name} (id, text, meta, embedding) VALUES (:1, :2, :3, :4)") + with self._get_cursor() as cur: + cur.executemany(f"INSERT INTO {self.table_name} (id, text, meta, embedding) VALUES (:1, :2, :3, :4)", values) + return pks + + def text_exists(self, id: str) -> bool: + with self._get_cursor() as cur: + cur.execute(f"SELECT id FROM {self.table_name} WHERE id = :1", [id]) + return cur.fetchone() is not None + + def get_by_ids(self, ids: list[str]) -> list[Document]: + with self._get_cursor() as cur: + placeholders = ",".join(f":{i + 1}" for i in range(len(ids))) + cur.execute(f"SELECT meta, text FROM {self.table_name} WHERE id IN ({placeholders})", ids) + docs = [] + for record in cur: + docs.append(Document(page_content=record[1], metadata=record[0])) + return docs + #def get_ids_by_metadata_field(self, key: str, value: str): + # with self._get_cursor() as cur: + # cur.execute(f"SELECT id FROM {self.table_name} d WHERE d.meta.{key}='{value}'" ) + # idss = [] + # for record in cur: + # idss.append(record[0]) + # return idss + + #def delete_by_document_id(self, document_id: str): + # ids = self.get_ids_by_metadata_field('doc_id', document_id) + # if len(ids)>0: + # with self._get_cursor() as cur: + # cur.execute(f"delete FROM {self.table_name} d WHERE d.meta.doc_id in '%s'" % ("','".join(ids),)) + + + def delete_by_ids(self, ids: list[str]) -> None: + with self._get_cursor() as cur: + placeholders = ",".join(f":{i + 1}" for i in range(len(ids))) + cur.execute(f"DELETE FROM {self.table_name} WHERE id IN ({placeholders})", ids) + + def delete_by_metadata_field(self, key: str, value: str) -> None: + with self._get_cursor() as cur: + cur.execute(f"DELETE FROM {self.table_name} WHERE JSON_VALUE(meta, '$.{key}') = :1", [value]) + + def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: + """ + Search the nearest neighbors to a vector. + + :param query_vector: The input vector to search for similar items. + :param top_k: The number of nearest neighbors to return, default is 5. + :return: List of Documents that are nearest to the query vector.
+ """ + top_k = kwargs.get("top_k", 5) + with self._get_cursor() as cur: + cur.execute( + f"SELECT meta, text, vector_distance(embedding,:1) AS distance FROM {self.table_name} ORDER BY distance fetch first {top_k} rows only" ,[numpy.array(query_vector)] + ) + docs = [] + score_threshold = kwargs.get("score_threshold") if kwargs.get("score_threshold") else 0.0 + for record in cur: + metadata, text, distance = record + score = 1 - distance + metadata["score"] = score + if score > score_threshold: + docs.append(Document(page_content=text, metadata=metadata)) + return docs + + def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: + # do not support bm25 search + return [] + + def delete(self) -> None: + with self._get_cursor() as cur: + cur.execute(f"DROP TABLE IF EXISTS {self.table_name}") + + def _create_collection(self, dimension: int): + cache_key = f"vector_indexing_{self._collection_name}" + lock_name = f"{cache_key}_lock" + with redis_client.lock(lock_name, timeout=20): + collection_exist_cache_key = f"vector_indexing_{self._collection_name}" + if redis_client.get(collection_exist_cache_key): + return + + with self._get_cursor() as cur: + cur.execute(SQL_CREATE_TABLE.format(table_name=self.table_name)) + redis_client.set(collection_exist_cache_key, 1, ex=3600) + + +class OracleVectorFactory(AbstractVectorFactory): + def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> OracleVector: + if dataset.index_struct_dict: + class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] + collection_name = class_prefix + else: + dataset_id = dataset.id + collection_name = Dataset.gen_collection_name_by_id(dataset_id) + dataset.index_struct = json.dumps( + self.gen_index_struct_dict(VectorType.ORACLE, collection_name)) + + config = current_app.config + return OracleVector( + collection_name=collection_name, + config=OracleVectorConfig( + host=config.get("ORACLE_HOST"), + port=config.get("ORACLE_PORT"), + user=config.get("ORACLE_USER"), + password=config.get("ORACLE_PASSWORD"), + database=config.get("ORACLE_DATABASE"), + ), + ) diff --git a/api/core/rag/datasource/vdb/tencent/tencent_vector.py b/api/core/rag/datasource/vdb/tencent/tencent_vector.py index 2372976bad3770..3af85854d27f41 100644 --- a/api/core/rag/datasource/vdb/tencent/tencent_vector.py +++ b/api/core/rag/datasource/vdb/tencent/tencent_vector.py @@ -210,7 +210,7 @@ def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings dataset_id = dataset.id collection_name = Dataset.gen_collection_name_by_id(dataset_id).lower() dataset.index_struct = json.dumps( - self.gen_index_struct_dict(VectorType.TIDB_VECTOR, collection_name)) + self.gen_index_struct_dict(VectorType.TENCENT, collection_name)) config = current_app.config return TencentVector( diff --git a/api/core/rag/datasource/vdb/vector_factory.py b/api/core/rag/datasource/vdb/vector_factory.py index 8882cb2170b3a8..719e2b9a23cbb4 100644 --- a/api/core/rag/datasource/vdb/vector_factory.py +++ b/api/core/rag/datasource/vdb/vector_factory.py @@ -78,6 +78,9 @@ def get_vector_factory(vector_type: str) -> type[AbstractVectorFactory]: case VectorType.TENCENT: from core.rag.datasource.vdb.tencent.tencent_vector import TencentVectorFactory return TencentVectorFactory + case VectorType.ORACLE: + from core.rag.datasource.vdb.oracle.oraclevector import OracleVectorFactory + return OracleVectorFactory case VectorType.OPENSEARCH: from core.rag.datasource.vdb.opensearch.opensearch_vector import 
OpenSearchVectorFactory return OpenSearchVectorFactory diff --git a/api/core/rag/datasource/vdb/vector_type.py b/api/core/rag/datasource/vdb/vector_type.py index 4a27e52706d369..dbd5afcb3ea751 100644 --- a/api/core/rag/datasource/vdb/vector_type.py +++ b/api/core/rag/datasource/vdb/vector_type.py @@ -12,3 +12,4 @@ class VectorType(str, Enum): WEAVIATE = 'weaviate' OPENSEARCH = 'opensearch' TENCENT = 'tencent' + ORACLE = 'oracle' diff --git a/api/core/rag/extractor/excel_extractor.py b/api/core/rag/extractor/excel_extractor.py index 4d2f61139aedaa..931297c95ec325 100644 --- a/api/core/rag/extractor/excel_extractor.py +++ b/api/core/rag/extractor/excel_extractor.py @@ -2,7 +2,6 @@ from typing import Optional import pandas as pd -import xlrd from core.rag.extractor.extractor_base import BaseExtractor from core.rag.models.document import Document @@ -28,61 +27,19 @@ def __init__( self._autodetect_encoding = autodetect_encoding def extract(self) -> list[Document]: - """ parse excel file""" - if self._file_path.endswith('.xls'): - return self._extract4xls() - elif self._file_path.endswith('.xlsx'): - return self._extract4xlsx() - - def _extract4xls(self) -> list[Document]: - wb = xlrd.open_workbook(filename=self._file_path) + """ Load from Excel file in xls or xlsx format using Pandas.""" documents = [] - # loop over all sheets - for sheet in wb.sheets(): - row_header = None - for row_index, row in enumerate(sheet.get_rows(), start=1): - if self.is_blank_row(row): - continue - if row_header is None: - row_header = row - continue - item_arr = [] - for index, cell in enumerate(row): - txt_value = str(cell.value) - item_arr.append(f'"{row_header[index].value}":"{txt_value}"') - item_str = ",".join(item_arr) - document = Document(page_content=item_str, metadata={'source': self._file_path}) - documents.append(document) - return documents - - def _extract4xlsx(self) -> list[Document]: - """Load from file path using Pandas.""" - data = [] # Read each worksheet of an Excel file using Pandas - xls = pd.ExcelFile(self._file_path) - for sheet_name in xls.sheet_names: - df = pd.read_excel(xls, sheet_name=sheet_name) + excel_file = pd.ExcelFile(self._file_path) + for sheet_name in excel_file.sheet_names: + df: pd.DataFrame = excel_file.parse(sheet_name=sheet_name) # filter out rows with all NaN values df.dropna(how='all', inplace=True) # transform each row into a Document - for _, row in df.iterrows(): - item = ';'.join(f'"{k}":"{v}"' for k, v in row.items() if pd.notna(v)) - document = Document(page_content=item, metadata={'source': self._file_path}) - data.append(document) - return data + documents += [Document(page_content=';'.join(f'"{k}":"{v}"' for k, v in row.items() if pd.notna(v)), + metadata={'source': self._file_path}, + ) for _, row in df.iterrows()] - @staticmethod - def is_blank_row(row): - """ - - Determine whether the specified line is a blank line. - :param row: row object。 - :return: Returns True if the row is blank, False otherwise. 
- """ - # Iterates through the cells and returns False if a non-empty cell is found - for cell in row: - if cell.value is not None and cell.value != '': - return False - return True + return documents diff --git a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_search.py b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_search.py index bf963f3442fd6c..442f29f33dfcef 100644 --- a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_search.py +++ b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_search.py @@ -2,9 +2,20 @@ from duckduckgo_search import DDGS +from core.model_runtime.entities.message_entities import SystemPromptMessage from core.tools.entities.tool_entities import ToolInvokeMessage from core.tools.tool.builtin_tool import BuiltinTool +SUMMARY_PROMPT = """ +User's query: +{query} + +Here is the search engine result: +{content} + +Please summarize the result in a few sentences. +""" + class DuckDuckGoSearchTool(BuiltinTool): """ @@ -25,5 +36,12 @@ def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMe results = [res.get("body") for res in response] results = "\n".join(results) if require_summary: - results = self.summary(user_id=user_id, content=results) + results = self.summary_results(user_id=user_id, content=results, query=query) return self.create_text_message(text=results) + + def summary_results(self, user_id: str, content: str, query: str) -> str: + prompt = SUMMARY_PROMPT.format(query=query, content=content) + summary = self.invoke_model(user_id=user_id, prompt_messages=[ + SystemPromptMessage(content=prompt), + ], stop=[]) + return summary.message.content diff --git a/api/core/tools/provider/builtin/novitaai/tools/novitaai_modelquery.py b/api/core/tools/provider/builtin/novitaai/tools/novitaai_modelquery.py index 637dd70e55d622..ec2927675e14b0 100644 --- a/api/core/tools/provider/builtin/novitaai/tools/novitaai_modelquery.py +++ b/api/core/tools/provider/builtin/novitaai/tools/novitaai_modelquery.py @@ -42,17 +42,18 @@ def _invoke(self, result_str = '' if result_type == 'first sd_name': - result_str = models_data[0]['sd_name_in_api'] + result_str = models_data[0]['sd_name_in_api'] if len(models_data) > 0 else '' elif result_type == 'first name sd_name pair': - result_str = json.dumps({'name': models_data[0]['name'], 'sd_name': models_data[0]['sd_name_in_api']}) + result_str = json.dumps({'name': models_data[0]['name'], 'sd_name': models_data[0]['sd_name_in_api']}) if len(models_data) > 0 else '' elif result_type == 'sd_name array': - sd_name_array = [model['sd_name_in_api'] for model in models_data] + sd_name_array = [model['sd_name_in_api'] for model in models_data] if len(models_data) > 0 else [] result_str = json.dumps(sd_name_array) elif result_type == 'name array': - name_array = [model['name'] for model in models_data] + name_array = [model['name'] for model in models_data] if len(models_data) > 0 else [] result_str = json.dumps(name_array) elif result_type == 'name sd_name pair array': - name_sd_name_pair_array = [{'name': model['name'], 'sd_name': model['sd_name_in_api']} for model in models_data] + name_sd_name_pair_array = [{'name': model['name'], 'sd_name': model['sd_name_in_api']} + for model in models_data] if len(models_data) > 0 else [] result_str = json.dumps(name_sd_name_pair_array) elif result_type == 'whole info array': result_str = json.dumps(models_data) diff --git a/api/core/tools/provider/builtin/novitaai/tools/novitaai_modelquery.yaml 
b/api/core/tools/provider/builtin/novitaai/tools/novitaai_modelquery.yaml index a933f76d0e3ed5..a14795e45e0e4f 100644 --- a/api/core/tools/provider/builtin/novitaai/tools/novitaai_modelquery.yaml +++ b/api/core/tools/provider/builtin/novitaai/tools/novitaai_modelquery.yaml @@ -19,7 +19,8 @@ parameters: human_description: en_US: Seaching the content of sd_name, name, tags. zh_Hans: 搜索 sd_name、name、tags 中的内容 - form: form + llm_description: Enter the content to search + form: llm - name: result_type type: select default: "first sd_name" diff --git a/api/core/workflow/nodes/http_request/entities.py b/api/core/workflow/nodes/http_request/entities.py index c027f9fd29b7c8..00d72a8b0aca83 100644 --- a/api/core/workflow/nodes/http_request/entities.py +++ b/api/core/workflow/nodes/http_request/entities.py @@ -22,7 +22,7 @@ class Config(BaseModel): header: Union[None, str] = None type: Literal['no-auth', 'api-key'] - config: Optional[Config] + config: Optional[Config] = None @field_validator('config', mode='before') @classmethod @@ -52,6 +52,6 @@ class Timeout(BaseModel): authorization: Authorization headers: str params: str - body: Optional[Body] + body: Optional[Body] = None timeout: Optional[Timeout] = None mask_authorization_header: Optional[bool] = True diff --git a/api/fields/app_fields.py b/api/fields/app_fields.py index 212c3e7f179862..e314fa21a38bce 100644 --- a/api/fields/app_fields.py +++ b/api/fields/app_fields.py @@ -117,6 +117,7 @@ 'customize_token_strategy': fields.String, 'prompt_public': fields.Boolean, 'app_base_url': fields.String, + 'show_workflow_steps': fields.Boolean, } app_detail_fields_with_site = { @@ -149,5 +150,6 @@ 'privacy_policy': fields.String, 'custom_disclaimer': fields.String, 'customize_token_strategy': fields.String, - 'prompt_public': fields.Boolean + 'prompt_public': fields.Boolean, + 'show_workflow_steps': fields.Boolean, } diff --git a/api/libs/helper.py b/api/libs/helper.py index fa326c5a532d05..ebabb2ea474b6a 100644 --- a/api/libs/helper.py +++ b/api/libs/helper.py @@ -140,7 +140,7 @@ def generate_string(n): return result -def get_remote_ip(request): +def get_remote_ip(request) -> str: if request.headers.get('CF-Connecting-IP'): return request.headers.get('Cf-Connecting-Ip') elif request.headers.getlist("X-Forwarded-For"): diff --git a/api/migrations/README b/api/migrations/README index 220678df7ab06e..0e048441597444 100644 --- a/api/migrations/README +++ b/api/migrations/README @@ -1,2 +1 @@ Single-database configuration for Flask. - diff --git a/api/migrations/versions/4ff534e1eb11_add_workflow_to_site.py b/api/migrations/versions/4ff534e1eb11_add_workflow_to_site.py new file mode 100644 index 00000000000000..c09cf2af60cdff --- /dev/null +++ b/api/migrations/versions/4ff534e1eb11_add_workflow_to_site.py @@ -0,0 +1,33 @@ +"""add workflow to site + +Revision ID: 4ff534e1eb11 +Revises: 7b45942e39bb +Create Date: 2024-06-21 04:16:03.419634 + +""" +import sqlalchemy as sa +from alembic import op + +import models as models + +# revision identifiers, used by Alembic. +revision = '4ff534e1eb11' +down_revision = '7b45942e39bb' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('sites', schema=None) as batch_op: + batch_op.add_column(sa.Column('show_workflow_steps', sa.Boolean(), server_default=sa.text('true'), nullable=False)) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('sites', schema=None) as batch_op: + batch_op.drop_column('show_workflow_steps') + + # ### end Alembic commands ### diff --git a/api/models/model.py b/api/models/model.py index 657db5a5c2e274..3024be0b4c86b6 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -1043,6 +1043,7 @@ class Site(db.Model): default_language = db.Column(db.String(255), nullable=False) copyright = db.Column(db.String(255)) privacy_policy = db.Column(db.String(255)) + show_workflow_steps = db.Column(db.Boolean, nullable=False, server_default=db.text('true')) custom_disclaimer = db.Column(db.String(255), nullable=True) customize_domain = db.Column(db.String(255)) customize_token_strategy = db.Column(db.String(255), nullable=False) diff --git a/api/poetry.lock b/api/poetry.lock index ac918a7783f7b9..89140ce75ec80a 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -256,6 +256,63 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (>=0.23)"] +[[package]] +name = "argon2-cffi" +version = "23.1.0" +description = "Argon2 for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, + {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, +] + +[package.dependencies] +argon2-cffi-bindings = "*" + +[package.extras] +dev = ["argon2-cffi[tests,typing]", "tox (>4)"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] +tests = ["hypothesis", "pytest"] +typing = ["mypy"] + +[[package]] +name = "argon2-cffi-bindings" +version = "21.2.0" +description = "Low-level CFFI bindings for Argon2" +optional = false +python-versions = ">=3.6" +files = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = 
"argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] + +[package.dependencies] +cffi = ">=1.0.1" + +[package.extras] +dev = ["cogapp", "pre-commit", "pytest", "wheel"] +tests = ["pytest"] + [[package]] name = "arxiv" version = "2.1.0" @@ -515,54 +572,53 @@ crt = ["awscrt (==0.19.12)"] [[package]] name = "bottleneck" -version = "1.3.8" +version = "1.4.0" description = "Fast NumPy array functions written in C" optional = false python-versions = "*" files = [ - {file = "Bottleneck-1.3.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:865c8ed5b798c0198b0b80553e09cc0d890c4f5feb3d81d31661517ca7819fa3"}, - {file = "Bottleneck-1.3.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d073a31e259d40b25e29dbba80f73abf38afe98fd730c79dad7edd9a0ad6cff5"}, - {file = "Bottleneck-1.3.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b806b277ab47495032822f55f43b8d336e4b7e73f8506ed34d3ea3da6d644abc"}, - {file = "Bottleneck-1.3.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:770b517609916adeb39d3b1a386a29bc316da03dd61e7ee6e8a38325b80cc327"}, - {file = "Bottleneck-1.3.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2948502b0394ee419945b55b092585222a505c61d41a874c741be49f2cac056f"}, - {file = "Bottleneck-1.3.8-cp310-cp310-win32.whl", hash = "sha256:271b6333522beb8aee32e640ba49a2064491d2c10317baa58a5996be3dd443e4"}, - {file = 
"Bottleneck-1.3.8-cp310-cp310-win_amd64.whl", hash = "sha256:d41000ea7ca196b5fd39d6fccd34bf0704c8831731cedd2da2dcae3c6ac49c42"}, - {file = "Bottleneck-1.3.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0a7f454394cd3642498b6e077e70f4a6b9fd46a8eb908c83ac737fdc9f9a98c"}, - {file = "Bottleneck-1.3.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c4ea8b9024dcb4e83b5c118a3c8faa863ace2ad572849da548a74a8ee4e8f2a"}, - {file = "Bottleneck-1.3.8-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f40724b6e965ff5b88b333d4a10097b1629e60c0db21bb3d08c24d7b1a904a16"}, - {file = "Bottleneck-1.3.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4bd7183b8dcca89d0e65abe4507c19667dd31dacfbcc8ed705bad642f26a46e1"}, - {file = "Bottleneck-1.3.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:20aa31a7d9d747c499ace1610a6e1f7aba6e3d4a9923e0312f6b4b6d68a59af3"}, - {file = "Bottleneck-1.3.8-cp311-cp311-win32.whl", hash = "sha256:350520105d9449e6565b3f0c4ce1f80a0b3e4d63695ebbf29db41f62e13f6461"}, - {file = "Bottleneck-1.3.8-cp311-cp311-win_amd64.whl", hash = "sha256:167a278902775defde7dfded6e98e3707dfe54971ffd9aec25c43bc74e4e381a"}, - {file = "Bottleneck-1.3.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c6e93ed45c6c83392f73d0333b310b38772df7eb78c120c1447245691bdedaf4"}, - {file = "Bottleneck-1.3.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3400f47dda0196b5af50b0b0678e33cc8c42e52e55ae0a63cdfed60725659bc"}, - {file = "Bottleneck-1.3.8-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fba5fd1805c71b2eeea50bea93d59be449c4af23ebd8da5f75fd74fd0331e314"}, - {file = "Bottleneck-1.3.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:60139c5c3d2a9c1454a04af5ee981a9f56548d27fa36f264069b149a6e9b01ed"}, - {file = "Bottleneck-1.3.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:99fab17fa26c811ccad63e208314726e718ae6605314329eca09641954550523"}, - {file = "Bottleneck-1.3.8-cp312-cp312-win32.whl", hash = "sha256:d3ae2bb5d4168912e438e377cc1301fa01df949ba59cd86317b3e00404fd4a97"}, - {file = "Bottleneck-1.3.8-cp312-cp312-win_amd64.whl", hash = "sha256:bcba1d5d5328c50f94852ab521fcb26f35d9e0ccd928d120d56455d1a5bb743f"}, - {file = "Bottleneck-1.3.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8d01fd5389d3160d54619119987ac24b020fa6810b7b398fff4945892237b3da"}, - {file = "Bottleneck-1.3.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca25f0003ef65264942f6306d793e0f270ece8b406c5a293dfc7d878146e9f8"}, - {file = "Bottleneck-1.3.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7763cf1516fa388c3587d12182fc1bc1c8089eab1a0a1bf09761f4c41af73c"}, - {file = "Bottleneck-1.3.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:38837c022350e2a656453f0e448416b7108cf67baccf11d04a0b3b70a48074dd"}, - {file = "Bottleneck-1.3.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:84ca5e741fae1c1796744dbdd0d2c1789cb74dd79c12ea8ec5834f83430f8520"}, - {file = "Bottleneck-1.3.8-cp37-cp37m-win32.whl", hash = "sha256:f4dfc22a3450227e692ef2ff4657639c33eec88ad04ee3ce29d1a23a4942da24"}, - {file = "Bottleneck-1.3.8-cp37-cp37m-win_amd64.whl", hash = "sha256:90b87eed152bbd760c4eb11473c2cf036abdb26e2f84caeb00787da74fb08c40"}, - {file = 
"Bottleneck-1.3.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54a1b5d9d63b2d9f2955f8542eea26c418f97873e0abf86ca52beea0208c9306"}, - {file = "Bottleneck-1.3.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:019dd142d1e870388fb0b649213a0d8e569cce784326e183deba8f17826edd9f"}, - {file = "Bottleneck-1.3.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b5ed34a540eb7df59f45da659af9f792306637de1c69c95f020294f3b9fc4a8"}, - {file = "Bottleneck-1.3.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b69fcd4d818bcf9d53497d8accd0d5f852a447728baaa33b9b7168f8c4221d06"}, - {file = "Bottleneck-1.3.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:02616a830bd477f5ba51103396092da4b9d83cea2e88f5b8069e3f4f7b796704"}, - {file = "Bottleneck-1.3.8-cp38-cp38-win32.whl", hash = "sha256:93d359fb83eb3bdd6635ef6e64835c38ffdc211441fc190549f286e6af98b5f6"}, - {file = "Bottleneck-1.3.8-cp38-cp38-win_amd64.whl", hash = "sha256:51c8bb3dffeb72c14f0382b80de76eabac6726d316babbd48f7e4056267d7910"}, - {file = "Bottleneck-1.3.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:84453548b0f722c3be912ce3c6b685917fea842bf1252eeb63714a2c1fd1ffc9"}, - {file = "Bottleneck-1.3.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92700867504a213cafa9b8d9be529bd6e18dc83366b2ba00e86e80769b93f678"}, - {file = "Bottleneck-1.3.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fadfd2f3931fdff42f4b9867eb02ed7c662d01e6099ff6b347b6ced791450651"}, - {file = "Bottleneck-1.3.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:cfbc4a3a934b677bfbc37ac8757c4e1264a76262b774259bd3fa8a265dbd668b"}, - {file = "Bottleneck-1.3.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3c74c18f86a1ffac22280b005df8bb8a58505ac6663c4d6807f39873c17dc347"}, - {file = "Bottleneck-1.3.8-cp39-cp39-win32.whl", hash = "sha256:211f881159e8adb3a57df2263028ae6dc89ec4328bfd43f3421e507406c28654"}, - {file = "Bottleneck-1.3.8-cp39-cp39-win_amd64.whl", hash = "sha256:8615eeb75009ba7c0a112a5a6a5154ed3d61fd6b0879631778b3e42e2d9a6d65"}, - {file = "Bottleneck-1.3.8.tar.gz", hash = "sha256:6780d896969ba7f53c8995ba90c87c548beb3db435dc90c60b9a10ed1ab4d868"}, + {file = "Bottleneck-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2110af22aa8c2779faba8aa021d6b559df04449bdf21d510eacd7910934189fe"}, + {file = "Bottleneck-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:381cbd1e52338fcdf9ff01c962e6aa187b2d8b3b369d42e779b6d33ac61f8d35"}, + {file = "Bottleneck-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a91e40bbb8452e77772614d882be2c34b3b514d9f15460f703293525a6e173d"}, + {file = "Bottleneck-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:59604949aea476f5075b965129eaa3c2d90891fd43b0dfaf2ad7621bb5db14a5"}, + {file = "Bottleneck-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c2c92545e1bc8e859d8d137aefa3b24843bd374b17c9814dafa3bbcea9fc4ec0"}, + {file = "Bottleneck-1.4.0-cp310-cp310-win32.whl", hash = "sha256:f63e79bfa2f82a7432c8b147ed321d01ca7769bc17cc04644286a4ce58d30549"}, + {file = "Bottleneck-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:d69907d8d679cb5091a3f479c46bf1076f149f6311ff3298bac5089b86a2fab1"}, + {file = "Bottleneck-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:67347b0f01f32a232a6269c37afc1c079e08f6455fa12e91f4a1cd12eb0d11a5"}, + {file = "Bottleneck-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1490348b3bbc0225523dc2c00c6bb3e66168c537d62797bd29783c0826c09838"}, + {file = "Bottleneck-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a704165552496cbcc8bcc5921bb679fd6fa66bb1e758888de091b1223231c9f0"}, + {file = "Bottleneck-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ffb4e4edf7997069719b9269926cc00a2a12c6e015422d1ebc2f621c4541396a"}, + {file = "Bottleneck-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5d6bf45ed58d5e7414c0011ef2da75474fe597a51970df83596b0bcb79c14c5e"}, + {file = "Bottleneck-1.4.0-cp311-cp311-win32.whl", hash = "sha256:ed209f8f3cb9954773764b0fa2510a7a9247ad245593187ac90bd0747771bc5c"}, + {file = "Bottleneck-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d53f1a72b12cfd76b56934c33bc0cb7c1a295f23a2d3ffba8c764514c9b5e0ff"}, + {file = "Bottleneck-1.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e720ff24370324c84a82b1a18195274715c23181748b2b9e3dacad24198ca06f"}, + {file = "Bottleneck-1.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44305c70c2a1539b0ae968e033f301ad868a6146b47e3cccd73fdfe3fc07c4ee"}, + {file = "Bottleneck-1.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b4dac5d2a871b7bd296c2b92426daa27d5b07aa84ef2557db097d29135da4eb"}, + {file = "Bottleneck-1.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fbcdd01db9e27741fb16a02b720cf02389d4b0b99cefe3c834c7df88c2d7412d"}, + {file = "Bottleneck-1.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:14b3334a39308fbb05dacd35ac100842aa9e9bc70afbdcebe43e46179d183fd0"}, + {file = "Bottleneck-1.4.0-cp312-cp312-win32.whl", hash = "sha256:520d7a83cd48b3f58e5df1a258acb547f8a5386a8c21ca9e1058d83a0d622fdf"}, + {file = "Bottleneck-1.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b1339b9ad3ee217253f246cde5c3789eb527cf9dd31ff0a1f5a8bf7fc89eadad"}, + {file = "Bottleneck-1.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2749602200aaa0e12a0f3f936dd6d4035384ad10d3acf7ac4f418c501683397"}, + {file = "Bottleneck-1.4.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb79a2ac135567694f13339f0bebcee96aec09c596b324b61cd7fd5e306f49d"}, + {file = "Bottleneck-1.4.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c6097bf39723e76ff5bba160daab92ae599df212c859db8d46648548584d04a8"}, + {file = "Bottleneck-1.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b5f72b66ccc0272de46b67346cf8490737ba2adc6a302664f5326e7741b6d5ab"}, + {file = "Bottleneck-1.4.0-cp37-cp37m-win32.whl", hash = "sha256:9903f017b9d6f2f69ce241b424ddad7265624f64dc6eafbe257d45661febf8bd"}, + {file = "Bottleneck-1.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:834816c316ad184cae7ecb615b69876a42cd2cafb07ee66c57a9c1ccacb63339"}, + {file = "Bottleneck-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:03c43150f180d86a5633a6da788660d335983f6798fca306ba7f47ff27a1b7e7"}, + {file = "Bottleneck-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea333dbcadb780356c54f5c4fa7754f143573b57508fff43d5daf63298eb26a"}, + {file = 
"Bottleneck-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6179791c0119aec3708ef74ddadab8d183e3742adb93a9028718e8696bdf572b"}, + {file = "Bottleneck-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:220b72405f77aebb0137b733b464c2526ded471e4289ac1e840bab8852759a55"}, + {file = "Bottleneck-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8746f0f727997ce4c7457dc1fec4e4e3c0fdd8803514baa3d1c4ea6515ab04b2"}, + {file = "Bottleneck-1.4.0-cp38-cp38-win32.whl", hash = "sha256:6a36280ee33d9db799163f04e88b950261e590cc71d089f5e179b21680b5d491"}, + {file = "Bottleneck-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:de17e012694e6a987bb4eb050dd7f0cf939195a8e00cb23aa93ebee5fd5e64a8"}, + {file = "Bottleneck-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28260197ab8a4a6b7adf810523147b1a3e85607f4e26a0f685eb9d155cfc75af"}, + {file = "Bottleneck-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90d5d188a0cca0b9655ff2904ee61e7f183079e97550be98c2541a2eec358a72"}, + {file = "Bottleneck-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2861ff645d236f1a6f5c6d1ddb3db37d19af1d91057bdc4fd7b76299a15b3079"}, + {file = "Bottleneck-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6136ce7dcf825c432a20b80ab1c460264a437d8430fff32536176147e0b6b832"}, + {file = "Bottleneck-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:889e6855b77345622b4ba927335d3118745d590492941f5f78554f157d259e92"}, + {file = "Bottleneck-1.4.0-cp39-cp39-win32.whl", hash = "sha256:817aa43a671ede696ea023d8f35839a391244662340cc95a0f46965dda8b35cf"}, + {file = "Bottleneck-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:23834d82177d6997f21fa63156550668cd07a9a6e5a1b66ea80f1a14ac6ffd07"}, + {file = "bottleneck-1.4.0.tar.gz", hash = "sha256:beb36df519b8709e7d357c0c9639b03b885ca6355bbf5e53752c685de51605b8"}, ] [package.dependencies] @@ -1031,13 +1087,13 @@ numpy = "*" [[package]] name = "chromadb" -version = "0.5.1" +version = "0.5.3" description = "Chroma." optional = false python-versions = ">=3.8" files = [ - {file = "chromadb-0.5.1-py3-none-any.whl", hash = "sha256:61f1f75a672b6edce7f1c8875c67e2aaaaf130dc1c1684431fbc42ad7240d01d"}, - {file = "chromadb-0.5.1.tar.gz", hash = "sha256:e2b2b6a34c2a949bedcaa42fa7775f40c7f6667848fc8094dcbf97fc0d30bee7"}, + {file = "chromadb-0.5.3-py3-none-any.whl", hash = "sha256:b3874f08356e291c68c6d2e177db472cd51f22f3af7b9746215b748fd1e29982"}, + {file = "chromadb-0.5.3.tar.gz", hash = "sha256:05d887f56a46b2e0fc6ac5ab979503a27b9ee50d5ca9e455f83b2fb9840cd026"}, ] [package.dependencies] @@ -1784,19 +1840,19 @@ files = [ [[package]] name = "duckduckgo-search" -version = "6.1.6" +version = "6.1.7" description = "Search for words, documents, images, news, maps and text translation using the DuckDuckGo.com search engine." 
optional = false python-versions = ">=3.8" files = [ - {file = "duckduckgo_search-6.1.6-py3-none-any.whl", hash = "sha256:6139ab17579e96ca7c5ed9398365245a36ecca8e7432545e3115ef90a9304eb7"}, - {file = "duckduckgo_search-6.1.6.tar.gz", hash = "sha256:42c83d58f4f1d717a580b89cc86861cbae59e46e75288243776c53349d006bf1"}, + {file = "duckduckgo_search-6.1.7-py3-none-any.whl", hash = "sha256:ec7d5becb8c392c0293ff9464938c1014896e1e14725c05adc306290a636fab2"}, + {file = "duckduckgo_search-6.1.7.tar.gz", hash = "sha256:c6fd8ba17fe9cd0a4f32e5b96984e959c3da865f9c2864bfcf82bf7ff9b7e8f0"}, ] [package.dependencies] click = ">=8.1.7" -orjson = ">=3.10.4" -pyreqwest-impersonate = ">=0.4.7" +orjson = ">=3.10.5" +pyreqwest-impersonate = ">=0.4.8" [package.extras] dev = ["mypy (>=1.10.0)", "pytest (>=8.2.2)", "pytest-asyncio (>=0.23.7)", "ruff (>=0.4.8)"] @@ -1804,13 +1860,13 @@ lxml = ["lxml (>=5.2.2)"] [[package]] name = "email-validator" -version = "2.1.1" +version = "2.1.2" description = "A robust email address syntax and deliverability validation library." optional = false python-versions = ">=3.8" files = [ - {file = "email_validator-2.1.1-py3-none-any.whl", hash = "sha256:97d882d174e2a65732fb43bfce81a3a834cbc1bde8bf419e30ef5ea976370a05"}, - {file = "email_validator-2.1.1.tar.gz", hash = "sha256:200a70680ba08904be6d1eef729205cc0d687634399a5924d842533efb824b84"}, + {file = "email_validator-2.1.2-py3-none-any.whl", hash = "sha256:d89f6324e13b1e39889eab7f9ca2f91dc9aebb6fa50a6d8bd4329ab50f251115"}, + {file = "email_validator-2.1.2.tar.gz", hash = "sha256:14c0f3d343c4beda37400421b39fa411bbe33a75df20825df73ad53e06a9f04c"}, ] [package.dependencies] @@ -2001,18 +2057,18 @@ sgmllib3k = "*" [[package]] name = "filelock" -version = "3.14.0" +version = "3.15.3" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, - {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, + {file = "filelock-3.15.3-py3-none-any.whl", hash = "sha256:0151273e5b5d6cf753a61ec83b3a9b7d8821c39ae9af9d7ecf2f9e2f17404103"}, + {file = "filelock-3.15.3.tar.gz", hash = "sha256:e1199bf5194a2277273dacd50269f0d87d0682088a3c561c15674ea9005d8635"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] @@ -3847,32 +3903,32 @@ six = "*" [[package]] name = "llvmlite" -version = "0.42.0" +version = "0.43.0" description = "lightweight wrapper around basic LLVM functionality" optional = false python-versions = ">=3.9" files = [ - {file = "llvmlite-0.42.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3366938e1bf63d26c34fbfb4c8e8d2ded57d11e0567d5bb243d89aab1eb56098"}, - {file = "llvmlite-0.42.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c35da49666a21185d21b551fc3caf46a935d54d66969d32d72af109b5e7d2b6f"}, - {file = "llvmlite-0.42.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70f44ccc3c6220bd23e0ba698a63ec2a7d3205da0d848804807f37fc243e3f77"}, - {file = "llvmlite-0.42.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763f8d8717a9073b9e0246998de89929071d15b47f254c10eef2310b9aac033d"}, - {file = "llvmlite-0.42.0-cp310-cp310-win_amd64.whl", hash = "sha256:8d90edf400b4ceb3a0e776b6c6e4656d05c7187c439587e06f86afceb66d2be5"}, - {file = "llvmlite-0.42.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ae511caed28beaf1252dbaf5f40e663f533b79ceb408c874c01754cafabb9cbf"}, - {file = "llvmlite-0.42.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81e674c2fe85576e6c4474e8c7e7aba7901ac0196e864fe7985492b737dbab65"}, - {file = "llvmlite-0.42.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb3975787f13eb97629052edb5017f6c170eebc1c14a0433e8089e5db43bcce6"}, - {file = "llvmlite-0.42.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5bece0cdf77f22379f19b1959ccd7aee518afa4afbd3656c6365865f84903f9"}, - {file = "llvmlite-0.42.0-cp311-cp311-win_amd64.whl", hash = "sha256:7e0c4c11c8c2aa9b0701f91b799cb9134a6a6de51444eff5a9087fc7c1384275"}, - {file = "llvmlite-0.42.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:08fa9ab02b0d0179c688a4216b8939138266519aaa0aa94f1195a8542faedb56"}, - {file = "llvmlite-0.42.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b2fce7d355068494d1e42202c7aff25d50c462584233013eb4470c33b995e3ee"}, - {file = "llvmlite-0.42.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebe66a86dc44634b59a3bc860c7b20d26d9aaffcd30364ebe8ba79161a9121f4"}, - {file = "llvmlite-0.42.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d47494552559e00d81bfb836cf1c4d5a5062e54102cc5767d5aa1e77ccd2505c"}, - {file = 
"llvmlite-0.42.0-cp312-cp312-win_amd64.whl", hash = "sha256:05cb7e9b6ce69165ce4d1b994fbdedca0c62492e537b0cc86141b6e2c78d5888"}, - {file = "llvmlite-0.42.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bdd3888544538a94d7ec99e7c62a0cdd8833609c85f0c23fcb6c5c591aec60ad"}, - {file = "llvmlite-0.42.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0936c2067a67fb8816c908d5457d63eba3e2b17e515c5fe00e5ee2bace06040"}, - {file = "llvmlite-0.42.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a78ab89f1924fc11482209f6799a7a3fc74ddc80425a7a3e0e8174af0e9e2301"}, - {file = "llvmlite-0.42.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7599b65c7af7abbc978dbf345712c60fd596aa5670496561cc10e8a71cebfb2"}, - {file = "llvmlite-0.42.0-cp39-cp39-win_amd64.whl", hash = "sha256:43d65cc4e206c2e902c1004dd5418417c4efa6c1d04df05c6c5675a27e8ca90e"}, - {file = "llvmlite-0.42.0.tar.gz", hash = "sha256:f92b09243c0cc3f457da8b983f67bd8e1295d0f5b3746c7a1861d7a99403854a"}, + {file = "llvmlite-0.43.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a289af9a1687c6cf463478f0fa8e8aa3b6fb813317b0d70bf1ed0759eab6f761"}, + {file = "llvmlite-0.43.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d4fd101f571a31acb1559ae1af30f30b1dc4b3186669f92ad780e17c81e91bc"}, + {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d434ec7e2ce3cc8f452d1cd9a28591745de022f931d67be688a737320dfcead"}, + {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6912a87782acdff6eb8bf01675ed01d60ca1f2551f8176a300a886f09e836a6a"}, + {file = "llvmlite-0.43.0-cp310-cp310-win_amd64.whl", hash = "sha256:14f0e4bf2fd2d9a75a3534111e8ebeb08eda2f33e9bdd6dfa13282afacdde0ed"}, + {file = "llvmlite-0.43.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3e8d0618cb9bfe40ac38a9633f2493d4d4e9fcc2f438d39a4e854f39cc0f5f98"}, + {file = "llvmlite-0.43.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0a9a1a39d4bf3517f2af9d23d479b4175ead205c592ceeb8b89af48a327ea57"}, + {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1da416ab53e4f7f3bc8d4eeba36d801cc1894b9fbfbf2022b29b6bad34a7df2"}, + {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977525a1e5f4059316b183fb4fd34fa858c9eade31f165427a3977c95e3ee749"}, + {file = "llvmlite-0.43.0-cp311-cp311-win_amd64.whl", hash = "sha256:d5bd550001d26450bd90777736c69d68c487d17bf371438f975229b2b8241a91"}, + {file = "llvmlite-0.43.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f99b600aa7f65235a5a05d0b9a9f31150c390f31261f2a0ba678e26823ec38f7"}, + {file = "llvmlite-0.43.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:35d80d61d0cda2d767f72de99450766250560399edc309da16937b93d3b676e7"}, + {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eccce86bba940bae0d8d48ed925f21dbb813519169246e2ab292b5092aba121f"}, + {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6509e1507ca0760787a199d19439cc887bfd82226f5af746d6977bd9f66844"}, + {file = "llvmlite-0.43.0-cp312-cp312-win_amd64.whl", hash = "sha256:7a2872ee80dcf6b5dbdc838763d26554c2a18aa833d31a2635bff16aafefb9c9"}, + {file = "llvmlite-0.43.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cd2a7376f7b3367019b664c21f0c61766219faa3b03731113ead75107f3b66c"}, + {file = "llvmlite-0.43.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:18e9953c748b105668487b7c81a3e97b046d8abf95c4ddc0cd3c94f4e4651ae8"}, + {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74937acd22dc11b33946b67dca7680e6d103d6e90eeaaaf932603bec6fe7b03a"}, + {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9efc739cc6ed760f795806f67889923f7274276f0eb45092a1473e40d9b867"}, + {file = "llvmlite-0.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:47e147cdda9037f94b399bf03bfd8a6b6b1f2f90be94a454e3386f006455a9b4"}, + {file = "llvmlite-0.43.0.tar.gz", hash = "sha256:ae2b5b5c3ef67354824fb75517c8db5fbe93bc02cd9671f3c62271626bc041d5"}, ] [[package]] @@ -4191,17 +4247,23 @@ files = [ ] [[package]] -name = "milvus-lite" -version = "2.4.7" -description = "A lightweight version of Milvus wrapped with Python." +name = "minio" +version = "7.2.7" +description = "MinIO Python SDK for Amazon S3 Compatible Cloud Storage" optional = false -python-versions = ">=3.7" +python-versions = "*" files = [ - {file = "milvus_lite-2.4.7-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:c828190118b104b05b8c8e0b5a4147811c86b54b8fb67bc2e726ad10fc0b544e"}, - {file = "milvus_lite-2.4.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e1537633c39879714fb15082be56a4b97f74c905a6e98e302ec01320561081af"}, - {file = "milvus_lite-2.4.7-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f016474d663045787dddf1c3aad13b7d8b61fd329220318f858184918143dcbf"}, + {file = "minio-7.2.7-py3-none-any.whl", hash = "sha256:59d1f255d852fe7104018db75b3bebbd987e538690e680f7c5de835e422de837"}, + {file = "minio-7.2.7.tar.gz", hash = "sha256:473d5d53d79f340f3cd632054d0c82d2f93177ce1af2eac34a235bea55708d98"}, ] +[package.dependencies] +argon2-cffi = "*" +certifi = "*" +pycryptodome = "*" +typing-extensions = "*" +urllib3 = "*" + [[package]] name = "mmh3" version = "4.1.0" @@ -4588,37 +4650,37 @@ requests = ">=2.27.1" [[package]] name = "numba" -version = "0.59.1" +version = "0.60.0" description = "compiling Python code using LLVM" optional = false python-versions = ">=3.9" files = [ - {file = "numba-0.59.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:97385a7f12212c4f4bc28f648720a92514bee79d7063e40ef66c2d30600fd18e"}, - {file = "numba-0.59.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0b77aecf52040de2a1eb1d7e314497b9e56fba17466c80b457b971a25bb1576d"}, - {file = "numba-0.59.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3476a4f641bfd58f35ead42f4dcaf5f132569c4647c6f1360ccf18ee4cda3990"}, - {file = "numba-0.59.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:525ef3f820931bdae95ee5379c670d5c97289c6520726bc6937a4a7d4230ba24"}, - {file = "numba-0.59.1-cp310-cp310-win_amd64.whl", hash = "sha256:990e395e44d192a12105eca3083b61307db7da10e093972ca285c85bef0963d6"}, - {file = "numba-0.59.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:43727e7ad20b3ec23ee4fc642f5b61845c71f75dd2825b3c234390c6d8d64051"}, - {file = "numba-0.59.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:411df625372c77959570050e861981e9d196cc1da9aa62c3d6a836b5cc338966"}, - {file = "numba-0.59.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2801003caa263d1e8497fb84829a7ecfb61738a95f62bc05693fcf1733e978e4"}, - {file = "numba-0.59.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dd2842fac03be4e5324ebbbd4d2d0c8c0fc6e0df75c09477dd45b288a0777389"}, - {file = "numba-0.59.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:0594b3dfb369fada1f8bb2e3045cd6c61a564c62e50cf1f86b4666bc721b3450"}, - {file = "numba-0.59.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1cce206a3b92836cdf26ef39d3a3242fec25e07f020cc4feec4c4a865e340569"}, - {file = "numba-0.59.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8c8b4477763cb1fbd86a3be7050500229417bf60867c93e131fd2626edb02238"}, - {file = "numba-0.59.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d80bce4ef7e65bf895c29e3889ca75a29ee01da80266a01d34815918e365835"}, - {file = "numba-0.59.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f7ad1d217773e89a9845886401eaaab0a156a90aa2f179fdc125261fd1105096"}, - {file = "numba-0.59.1-cp312-cp312-win_amd64.whl", hash = "sha256:5bf68f4d69dd3a9f26a9b23548fa23e3bcb9042e2935257b471d2a8d3c424b7f"}, - {file = "numba-0.59.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4e0318ae729de6e5dbe64c75ead1a95eb01fabfe0e2ebed81ebf0344d32db0ae"}, - {file = "numba-0.59.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0f68589740a8c38bb7dc1b938b55d1145244c8353078eea23895d4f82c8b9ec1"}, - {file = "numba-0.59.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:649913a3758891c77c32e2d2a3bcbedf4a69f5fea276d11f9119677c45a422e8"}, - {file = "numba-0.59.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9712808e4545270291d76b9a264839ac878c5eb7d8b6e02c970dc0ac29bc8187"}, - {file = "numba-0.59.1-cp39-cp39-win_amd64.whl", hash = "sha256:8d51ccd7008a83105ad6a0082b6a2b70f1142dc7cfd76deb8c5a862367eb8c86"}, - {file = "numba-0.59.1.tar.gz", hash = "sha256:76f69132b96028d2774ed20415e8c528a34e3299a40581bae178f0994a2f370b"}, -] - -[package.dependencies] -llvmlite = "==0.42.*" -numpy = ">=1.22,<1.27" + {file = "numba-0.60.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d761de835cd38fb400d2c26bb103a2726f548dc30368853121d66201672e651"}, + {file = "numba-0.60.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:159e618ef213fba758837f9837fb402bbe65326e60ba0633dbe6c7f274d42c1b"}, + {file = "numba-0.60.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1527dc578b95c7c4ff248792ec33d097ba6bef9eda466c948b68dfc995c25781"}, + {file = "numba-0.60.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe0b28abb8d70f8160798f4de9d486143200f34458d34c4a214114e445d7124e"}, + {file = "numba-0.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:19407ced081d7e2e4b8d8c36aa57b7452e0283871c296e12d798852bc7d7f198"}, + {file = "numba-0.60.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a17b70fc9e380ee29c42717e8cc0bfaa5556c416d94f9aa96ba13acb41bdece8"}, + {file = "numba-0.60.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fb02b344a2a80efa6f677aa5c40cd5dd452e1b35f8d1c2af0dfd9ada9978e4b"}, + {file = "numba-0.60.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f4fde652ea604ea3c86508a3fb31556a6157b2c76c8b51b1d45eb40c8598703"}, + {file = "numba-0.60.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4142d7ac0210cc86432b818338a2bc368dc773a2f5cf1e32ff7c5b378bd63ee8"}, + {file = "numba-0.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:cac02c041e9b5bc8cf8f2034ff6f0dbafccd1ae9590dc146b3a02a45e53af4e2"}, + {file = "numba-0.60.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7da4098db31182fc5ffe4bc42c6f24cd7d1cb8a14b59fd755bfee32e34b8404"}, + {file = "numba-0.60.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:38d6ea4c1f56417076ecf8fc327c831ae793282e0ff51080c5094cb726507b1c"}, + {file = "numba-0.60.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:62908d29fb6a3229c242e981ca27e32a6e606cc253fc9e8faeb0e48760de241e"}, + {file = "numba-0.60.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0ebaa91538e996f708f1ab30ef4d3ddc344b64b5227b67a57aa74f401bb68b9d"}, + {file = "numba-0.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:f75262e8fe7fa96db1dca93d53a194a38c46da28b112b8a4aca168f0df860347"}, + {file = "numba-0.60.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:01ef4cd7d83abe087d644eaa3d95831b777aa21d441a23703d649e06b8e06b74"}, + {file = "numba-0.60.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:819a3dfd4630d95fd574036f99e47212a1af41cbcb019bf8afac63ff56834449"}, + {file = "numba-0.60.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0b983bd6ad82fe868493012487f34eae8bf7dd94654951404114f23c3466d34b"}, + {file = "numba-0.60.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c151748cd269ddeab66334bd754817ffc0cabd9433acb0f551697e5151917d25"}, + {file = "numba-0.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:3031547a015710140e8c87226b4cfe927cac199835e5bf7d4fe5cb64e814e3ab"}, + {file = "numba-0.60.0.tar.gz", hash = "sha256:5df6158e5584eece5fc83294b949fd30b9f1125df7708862205217e068aabf16"}, +] + +[package.dependencies] +llvmlite = "==0.43.*" +numpy = ">=1.22,<2.1" [[package]] name = "numexpr" @@ -4816,13 +4878,13 @@ datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] [[package]] name = "openpyxl" -version = "3.1.3" +version = "3.1.4" description = "A Python library to read/write Excel 2010 xlsx/xlsm files" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "openpyxl-3.1.3-py2.py3-none-any.whl", hash = "sha256:25071b558db709de9e8782c3d3e058af3b23ffb2fc6f40c8f0c45a154eced2c3"}, - {file = "openpyxl-3.1.3.tar.gz", hash = "sha256:8dd482e5350125b2388070bb2477927be2e8ebc27df61178709bc8c8751da2f9"}, + {file = "openpyxl-3.1.4-py2.py3-none-any.whl", hash = "sha256:ec17f6483f2b8f7c88c57e5e5d3b0de0e3fb9ac70edc084d28e864f5b33bbefd"}, + {file = "openpyxl-3.1.4.tar.gz", hash = "sha256:8d2c8adf5d20d6ce8f9bca381df86b534835e974ed0156dacefa76f68c1d69fb"}, ] [package.dependencies] @@ -5014,59 +5076,102 @@ files = [ {file = "opentelemetry_util_http-0.46b0.tar.gz", hash = "sha256:03b6e222642f9c7eae58d9132343e045b50aca9761fcb53709bd2b663571fdf6"}, ] +[[package]] +name = "oracledb" +version = "2.2.1" +description = "Python interface to Oracle Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "oracledb-2.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3dacef7c4dd3fca94728f05336076e063450bb57ea569e8dd67fae960aaf537e"}, + {file = "oracledb-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd8fdc93a65ae2e1c934a0e3e64cb01997ba004c48a986a37583f670dd344802"}, + {file = "oracledb-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531600569febef29806f058d0f0900127356caccba47785d7ec0fca4714af132"}, + {file = "oracledb-2.2.1-cp310-cp310-win32.whl", hash = "sha256:9bbd2c33a97a91d92178d6c4ffa8676b0da80b9fd1329a5e6a09e01b8b2472b5"}, + {file = "oracledb-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:708edcaddfefa1f58a75f72df2ea0d39980ae126db85ea59a4c83eab40b5f61e"}, + {file = "oracledb-2.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:fb6d9a4d7400398b22edb9431334f9add884dec9877fd9c4ae531e1ccc6ee1fd"}, + {file = "oracledb-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07757c240afbb4f28112a6affc2c5e4e34b8a92e5bb9af81a40fba398da2b028"}, + {file = "oracledb-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63daec72f853c47179e98493e9b732909d96d495bdceb521c5973a3940d28142"}, + {file = "oracledb-2.2.1-cp311-cp311-win32.whl", hash = "sha256:fec5318d1e0ada7e4674574cb6c8d1665398e8b9c02982279107212f05df1660"}, + {file = "oracledb-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:5134dccb5a11bc755abf02fd49be6dc8141dfcae4b650b55d40509323d00b5c2"}, + {file = "oracledb-2.2.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ac5716bc9a48247fdf563f5f4ec097f5c9f074a60fd130cdfe16699208ca29b5"}, + {file = "oracledb-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c150bddb882b7c73fb462aa2d698744da76c363e404570ed11d05b65811d96c3"}, + {file = "oracledb-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193e1888411bc21187ade4b16b76820bd1e8f216e25602f6cd0a97d45723c1dc"}, + {file = "oracledb-2.2.1-cp312-cp312-win32.whl", hash = "sha256:44a960f8bbb0711af222e0a9690e037b6a2a382e0559ae8eeb9cfafe26c7a3bc"}, + {file = "oracledb-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:470136add32f0d0084225c793f12a52b61b52c3dc00c9cd388ec6a3db3a7643e"}, + {file = "oracledb-2.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:506f0027a2c4b6e33b8aabaebd00e4e31cc85134aa82fd855f4817917cfc9d5e"}, + {file = "oracledb-2.2.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5b8b46e6579eaca3b1436fa57bd666ad041d7f4dd3f9237f21d132cc8b52c04"}, + {file = "oracledb-2.2.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a47019561c5cd76d1f19b3a528a98285dca9d915dd8559555f3074424ee9438"}, + {file = "oracledb-2.2.1-cp37-cp37m-win32.whl", hash = "sha256:4b433ea6465de03315bf7c121ad9272b4eef0ecaf235d1743b06557ee587bf6e"}, + {file = "oracledb-2.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6af95303446966c808f3a6c1c33cb0343e9bf8ec57841cc804de0eb1bfa337b5"}, + {file = "oracledb-2.2.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:7df0bebc28488655fbf64b9222d9a14e5ecd13254b426ef75da7adc80cbc18d9"}, + {file = "oracledb-2.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37564661ba93f5714969400fc8a57552e5ca4244d8ecc7044d29b4af4cf9a660"}, + {file = "oracledb-2.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9077cbbe7a2bad13e20af4276a1ef782029fc5601e9470b4b60f4bbb4144655b"}, + {file = "oracledb-2.2.1-cp38-cp38-win32.whl", hash = "sha256:406c1bacf8a12e993ffe148797a0eb98e62deac073195d5cfa076e78eea85c64"}, + {file = "oracledb-2.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:c1894be5800049c64cdba63f19b94bcb94c42e70f8a53d1dd2dfaa2882fa2096"}, + {file = "oracledb-2.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:78e64fa607b28f4de6ff4c6177ef10b8beae0b7fd43a76e78b2215defc1b73c6"}, + {file = "oracledb-2.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7d4999820f23bb5b28097885c8d18b6d6dce47a53aa59be66bf1c865c872b17"}, + {file = "oracledb-2.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0048148630b60fe42e598591be152bd863ef339dff1c3785b121313b94856223"}, + {file = "oracledb-2.2.1-cp39-cp39-win32.whl", hash = 
"sha256:49a16ccc64c52a83c9db40095d01b0f2ee7f8a20cb105c82ffc2f57151553cfd"}, + {file = "oracledb-2.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:9e76d46d8260e33442cac259278885adf90080f7d2117eaeb4b230504827860b"}, + {file = "oracledb-2.2.1.tar.gz", hash = "sha256:8464c6f0295f3318daf6c2c72c83c2dcbc37e13f8fd44e3e39ff8665f442d6b6"}, +] + +[package.dependencies] +cryptography = ">=3.2.1" + [[package]] name = "orjson" -version = "3.10.4" +version = "3.10.5" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.10.4-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:afca963f19ca60c7aedadea9979f769139127288dd58ccf3f7c5e8e6dc62cabf"}, - {file = "orjson-3.10.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b112eff36ba7ccc7a9d6b87e17b9d6bde4312d05e3ddf66bf5662481dee846"}, - {file = "orjson-3.10.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02b192eaba048b1039eca9a0cef67863bd5623042f5c441889a9957121d97e14"}, - {file = "orjson-3.10.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:827c3d0e4fc44242c82bfdb1a773235b8c0575afee99a9fa9a8ce920c14e440f"}, - {file = "orjson-3.10.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca8ec09724f10ec209244caeb1f9f428b6bb03f2eda9ed5e2c4dd7f2b7fabd44"}, - {file = "orjson-3.10.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8eaa5d531a8fde11993cbcb27e9acf7d9c457ba301adccb7fa3a021bfecab46c"}, - {file = "orjson-3.10.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e112aa7fc4ea67367ec5e86c39a6bb6c5719eddc8f999087b1759e765ddaf2d4"}, - {file = "orjson-3.10.4-cp310-none-win32.whl", hash = "sha256:1538844fb88446c42da3889f8c4ecce95a630b5a5ba18ecdfe5aea596f4dff21"}, - {file = "orjson-3.10.4-cp310-none-win_amd64.whl", hash = "sha256:de02811903a2e434127fba5389c3cc90f689542339a6e52e691ab7f693407b5a"}, - {file = "orjson-3.10.4-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:358afaec75de7237dfea08e6b1b25d226e33a1e3b6dc154fc99eb697f24a1ffa"}, - {file = "orjson-3.10.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb4e292c3198ab3d93e5f877301d2746be4ca0ba2d9c513da5e10eb90e19ff52"}, - {file = "orjson-3.10.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c39e57cf6323a39238490092985d5d198a7da4a3be013cc891a33fef13a536e"}, - {file = "orjson-3.10.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f86df433fc01361ff9270ad27455ce1ad43cd05e46de7152ca6adb405a16b2f6"}, - {file = "orjson-3.10.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c9966276a2c97e93e6cbe8286537f88b2a071827514f0d9d47a0aefa77db458"}, - {file = "orjson-3.10.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c499a14155a1f5a1e16e0cd31f6cf6f93965ac60a0822bc8340e7e2d3dac1108"}, - {file = "orjson-3.10.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3087023ce904a327c29487eb7e1f2c060070e8dbb9a3991b8e7952a9c6e62f38"}, - {file = "orjson-3.10.4-cp311-none-win32.whl", hash = "sha256:f965893244fe348b59e5ce560693e6dd03368d577ce26849b5d261ce31c70101"}, - {file = "orjson-3.10.4-cp311-none-win_amd64.whl", hash = "sha256:c212f06fad6aa6ce85d5665e91a83b866579f29441a47d3865c57329c0857357"}, - {file = 
"orjson-3.10.4-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d0965a8b0131959833ca8a65af60285995d57ced0de2fd8f16fc03235975d238"}, - {file = "orjson-3.10.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27b64695d9f2aef3ae15a0522e370ec95c946aaea7f2c97a1582a62b3bdd9169"}, - {file = "orjson-3.10.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:867d882ddee6a20be4c8b03ae3d2b0333894d53ad632d32bd9b8123649577171"}, - {file = "orjson-3.10.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a0667458f8a8ceb6dee5c08fec0b46195f92c474cbbec71dca2a6b7fd5b67b8d"}, - {file = "orjson-3.10.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3eac9befc4eaec1d1ff3bba6210576be4945332dde194525601c5ddb5c060d3"}, - {file = "orjson-3.10.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4343245443552eae240a33047a6d1bcac7a754ad4b1c57318173c54d7efb9aea"}, - {file = "orjson-3.10.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:30153e269eea43e98918d4d462a36a7065031d9246407dfff2579a4e457515c1"}, - {file = "orjson-3.10.4-cp312-none-win32.whl", hash = "sha256:1a7d092ee043abf3db19c2183115e80676495c9911843fdb3ebd48ca7b73079e"}, - {file = "orjson-3.10.4-cp312-none-win_amd64.whl", hash = "sha256:07a2adbeb8b9efe6d68fc557685954a1f19d9e33f5cc018ae1a89e96647c1b65"}, - {file = "orjson-3.10.4-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f5a746f3d908bce1a1e347b9ca89864047533bdfab5a450066a0315f6566527b"}, - {file = "orjson-3.10.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:465b4a8a3e459f8d304c19071b4badaa9b267c59207a005a7dd9dfe13d3a423f"}, - {file = "orjson-3.10.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35858d260728c434a3d91b60685ab32418318567e8902039837e1c2af2719e0b"}, - {file = "orjson-3.10.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8a5ba090d40c4460312dd69c232b38c2ff67a823185cfe667e841c9dd5c06841"}, - {file = "orjson-3.10.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dde86755d064664e62e3612a166c28298aa8dfd35a991553faa58855ae739cc"}, - {file = "orjson-3.10.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:020a9e9001cfec85c156ef3b185ff758b62ef986cefdb8384c4579facd5ce126"}, - {file = "orjson-3.10.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3bf8e6e3388a2e83a86466c912387e0f0a765494c65caa7e865f99969b76ba0d"}, - {file = "orjson-3.10.4-cp38-none-win32.whl", hash = "sha256:c5a1cca6a4a3129db3da68a25dc0a459a62ae58e284e363b35ab304202d9ba9e"}, - {file = "orjson-3.10.4-cp38-none-win_amd64.whl", hash = "sha256:ecd97d98d7bee3e3d51d0b51c92c457f05db4993329eea7c69764f9820e27eb3"}, - {file = "orjson-3.10.4-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:71362daa330a2fc85553a1469185ac448547392a8f83d34e67779f8df3a52743"}, - {file = "orjson-3.10.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d24b59d1fecb0fd080c177306118a143f7322335309640c55ed9580d2044e363"}, - {file = "orjson-3.10.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e906670aea5a605b083ebb58d575c35e88cf880fa372f7cedaac3d51e98ff164"}, - {file = "orjson-3.10.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ce32ed4bc4d632268e4978e595fe5ea07e026b751482b4a0feec48f66a90abc"}, - {file = 
"orjson-3.10.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1dcd34286246e0c5edd0e230d1da2daab2c1b465fcb6bac85b8d44057229d40a"}, - {file = "orjson-3.10.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c45d4b8c403e50beedb1d006a8916d9910ed56bceaf2035dc253618b44d0a161"}, - {file = "orjson-3.10.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:aaed3253041b5002a4f5bfdf6f7b5cce657d974472b0699a469d439beba40381"}, - {file = "orjson-3.10.4-cp39-none-win32.whl", hash = "sha256:9a4f41b7dbf7896f8dbf559b9b43dcd99e31e0d49ac1b59d74f52ce51ab10eb9"}, - {file = "orjson-3.10.4-cp39-none-win_amd64.whl", hash = "sha256:6c4eb7d867ed91cb61e6514cb4f457aa01d7b0fd663089df60a69f3d38b69d4c"}, - {file = "orjson-3.10.4.tar.gz", hash = "sha256:c912ed25b787c73fe994a5decd81c3f3b256599b8a87d410d799d5d52013af2a"}, + {file = "orjson-3.10.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:545d493c1f560d5ccfc134803ceb8955a14c3fcb47bbb4b2fee0232646d0b932"}, + {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4324929c2dd917598212bfd554757feca3e5e0fa60da08be11b4aa8b90013c1"}, + {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c13ca5e2ddded0ce6a927ea5a9f27cae77eee4c75547b4297252cb20c4d30e6"}, + {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6c8e30adfa52c025f042a87f450a6b9ea29649d828e0fec4858ed5e6caecf63"}, + {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338fd4f071b242f26e9ca802f443edc588fa4ab60bfa81f38beaedf42eda226c"}, + {file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6970ed7a3126cfed873c5d21ece1cd5d6f83ca6c9afb71bbae21a0b034588d96"}, + {file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:235dadefb793ad12f7fa11e98a480db1f7c6469ff9e3da5e73c7809c700d746b"}, + {file = "orjson-3.10.5-cp310-none-win32.whl", hash = "sha256:be79e2393679eda6a590638abda16d167754393f5d0850dcbca2d0c3735cebe2"}, + {file = "orjson-3.10.5-cp310-none-win_amd64.whl", hash = "sha256:c4a65310ccb5c9910c47b078ba78e2787cb3878cdded1702ac3d0da71ddc5228"}, + {file = "orjson-3.10.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:cdf7365063e80899ae3a697def1277c17a7df7ccfc979990a403dfe77bb54d40"}, + {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b68742c469745d0e6ca5724506858f75e2f1e5b59a4315861f9e2b1df77775a"}, + {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d10cc1b594951522e35a3463da19e899abe6ca95f3c84c69e9e901e0bd93d38"}, + {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcbe82b35d1ac43b0d84072408330fd3295c2896973112d495e7234f7e3da2e1"}, + {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c0eb7e0c75e1e486c7563fe231b40fdd658a035ae125c6ba651ca3b07936f5"}, + {file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:53ed1c879b10de56f35daf06dbc4a0d9a5db98f6ee853c2dbd3ee9d13e6f302f"}, + {file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:099e81a5975237fda3100f918839af95f42f981447ba8f47adb7b6a3cdb078fa"}, + {file = "orjson-3.10.5-cp311-none-win32.whl", hash = "sha256:1146bf85ea37ac421594107195db8bc77104f74bc83e8ee21a2e58596bfb2f04"}, + 
{file = "orjson-3.10.5-cp311-none-win_amd64.whl", hash = "sha256:36a10f43c5f3a55c2f680efe07aa93ef4a342d2960dd2b1b7ea2dd764fe4a37c"}, + {file = "orjson-3.10.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:68f85ecae7af14a585a563ac741b0547a3f291de81cd1e20903e79f25170458f"}, + {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28afa96f496474ce60d3340fe8d9a263aa93ea01201cd2bad844c45cd21f5268"}, + {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cd684927af3e11b6e754df80b9ffafd9fb6adcaa9d3e8fdd5891be5a5cad51e"}, + {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d21b9983da032505f7050795e98b5d9eee0df903258951566ecc358f6696969"}, + {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ad1de7fef79736dde8c3554e75361ec351158a906d747bd901a52a5c9c8d24b"}, + {file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d97531cdfe9bdd76d492e69800afd97e5930cb0da6a825646667b2c6c6c0211"}, + {file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d69858c32f09c3e1ce44b617b3ebba1aba030e777000ebdf72b0d8e365d0b2b3"}, + {file = "orjson-3.10.5-cp312-none-win32.whl", hash = "sha256:64c9cc089f127e5875901ac05e5c25aa13cfa5dbbbd9602bda51e5c611d6e3e2"}, + {file = "orjson-3.10.5-cp312-none-win_amd64.whl", hash = "sha256:b2efbd67feff8c1f7728937c0d7f6ca8c25ec81373dc8db4ef394c1d93d13dc5"}, + {file = "orjson-3.10.5-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:03b565c3b93f5d6e001db48b747d31ea3819b89abf041ee10ac6988886d18e01"}, + {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:584c902ec19ab7928fd5add1783c909094cc53f31ac7acfada817b0847975f26"}, + {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a35455cc0b0b3a1eaf67224035f5388591ec72b9b6136d66b49a553ce9eb1e6"}, + {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1670fe88b116c2745a3a30b0f099b699a02bb3482c2591514baf5433819e4f4d"}, + {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185c394ef45b18b9a7d8e8f333606e2e8194a50c6e3c664215aae8cf42c5385e"}, + {file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ca0b3a94ac8d3886c9581b9f9de3ce858263865fdaa383fbc31c310b9eac07c9"}, + {file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dfc91d4720d48e2a709e9c368d5125b4b5899dced34b5400c3837dadc7d6271b"}, + {file = "orjson-3.10.5-cp38-none-win32.whl", hash = "sha256:c05f16701ab2a4ca146d0bca950af254cb7c02f3c01fca8efbbad82d23b3d9d4"}, + {file = "orjson-3.10.5-cp38-none-win_amd64.whl", hash = "sha256:8a11d459338f96a9aa7f232ba95679fc0c7cedbd1b990d736467894210205c09"}, + {file = "orjson-3.10.5-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:85c89131d7b3218db1b24c4abecea92fd6c7f9fab87441cfc342d3acc725d807"}, + {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66215277a230c456f9038d5e2d84778141643207f85336ef8d2a9da26bd7ca"}, + {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51bbcdea96cdefa4a9b4461e690c75ad4e33796530d182bdd5c38980202c134a"}, + {file = 
"orjson-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbead71dbe65f959b7bd8cf91e0e11d5338033eba34c114f69078d59827ee139"}, + {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df58d206e78c40da118a8c14fc189207fffdcb1f21b3b4c9c0c18e839b5a214"}, + {file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c4057c3b511bb8aef605616bd3f1f002a697c7e4da6adf095ca5b84c0fd43595"}, + {file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b39e006b00c57125ab974362e740c14a0c6a66ff695bff44615dcf4a70ce2b86"}, + {file = "orjson-3.10.5-cp39-none-win32.whl", hash = "sha256:eded5138cc565a9d618e111c6d5c2547bbdd951114eb822f7f6309e04db0fb47"}, + {file = "orjson-3.10.5-cp39-none-win_amd64.whl", hash = "sha256:cc28e90a7cae7fcba2493953cff61da5a52950e78dc2dacfe931a317ee3d8de7"}, + {file = "orjson-3.10.5.tar.gz", hash = "sha256:7a5baef8a4284405d96c90c7c62b755e9ef1ada84c2406c24a9ebec86b89f46d"}, ] [[package]] @@ -5435,20 +5540,20 @@ wcwidth = "*" [[package]] name = "proto-plus" -version = "1.23.0" +version = "1.24.0" description = "Beautiful, Pythonic protocol buffers." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, - {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, + {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, + {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, ] [package.dependencies] -protobuf = ">=3.19.0,<5.0.0dev" +protobuf = ">=3.19.0,<6.0.0dev" [package.extras] -testing = ["google-api-core[grpc] (>=1.31.5)"] +testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" @@ -5799,13 +5904,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydantic-extra-types" -version = "2.8.1" +version = "2.8.2" description = "Extra Pydantic types." 
optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_extra_types-2.8.1-py3-none-any.whl", hash = "sha256:ca3fce71ee46bc1043bdf3d0e3c149a09ab162cb305c4ed8c501a5034a592dd6"}, - {file = "pydantic_extra_types-2.8.1.tar.gz", hash = "sha256:c7cabe403234658207dcefed3489f2e8bfc8f4a8e305e7ab25ee29eceed65b39"}, + {file = "pydantic_extra_types-2.8.2-py3-none-any.whl", hash = "sha256:f2400b3c3553fb7fa09a131967b4edf2d53f01ad9fa89d158784653f2e5c13d1"}, + {file = "pydantic_extra_types-2.8.2.tar.gz", hash = "sha256:4d2b3c52c1e2e4dfa31bf1d5a37b841b09e3c5a08ec2bffca0e07fc2ad7d5c4a"}, ] [package.dependencies] @@ -5884,29 +5989,24 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pymilvus" -version = "2.4.3" +version = "2.3.1" description = "Python Sdk for Milvus" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "pymilvus-2.4.3-py3-none-any.whl", hash = "sha256:38239e89f8d739f665141d0b80908990b5f59681e889e135c234a4a45669a5c8"}, - {file = "pymilvus-2.4.3.tar.gz", hash = "sha256:703ac29296cdce03d6dc2aaebbe959e57745c141a94150e371dc36c61c226cc1"}, + {file = "pymilvus-2.3.1-py3-none-any.whl", hash = "sha256:ce65e1de8700f33bd9aade20f013291629702e25b05726773208f1f0b22548ff"}, + {file = "pymilvus-2.3.1.tar.gz", hash = "sha256:d460f6204d7deb2cff93716bd65670c1b440694b77701fb0ab0ead791aa582c6"}, ] [package.dependencies] environs = "<=9.5.0" -grpcio = ">=1.49.1,<=1.63.0" -milvus-lite = ">=2.4.0,<2.5.0" +grpcio = ">=1.49.1,<=1.58.0" +minio = "*" pandas = ">=1.2.4" protobuf = ">=3.20.0" -setuptools = ">=67" +requests = "*" ujson = ">=2.0.0" -[package.extras] -bulk-writer = ["azure-storage-blob", "minio (>=7.0.0)", "pyarrow (>=12.0.0)", "requests"] -dev = ["black", "grpcio (==1.62.2)", "grpcio-testing (==1.62.2)", "grpcio-tools (==1.62.2)", "pytest (>=5.3.4)", "pytest-cov (>=2.8.1)", "pytest-timeout (>=1.3.4)", "ruff (>0.4.0)"] -model = ["milvus-model (>=0.1.0)"] - [[package]] name = "pymysql" version = "1.1.1" @@ -6013,59 +6113,59 @@ files = [ [[package]] name = "pyreqwest-impersonate" -version = "0.4.7" +version = "0.4.8" description = "HTTP client that can impersonate web browsers, mimicking their headers and `TLS/JA3/JA4/HTTP2` fingerprints" optional = false python-versions = ">=3.8" files = [ - {file = "pyreqwest_impersonate-0.4.7-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:c175dfc429c4231a6ce03841630b236f50995ca613ff1eea26fa4c75c730b562"}, - {file = "pyreqwest_impersonate-0.4.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b3f83c50cef2d5ed0a9246318fd3ef3bfeabe286d4eabf92df4835c05a0be7dc"}, - {file = "pyreqwest_impersonate-0.4.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f34930113aa42f47e0542418f6a67bdb2c23fe0e2fa1866f60b29280a036b829"}, - {file = "pyreqwest_impersonate-0.4.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88d2792df548b845edd409a3e4284f76cb4fc2510fe4a69fde9e39d54910b935"}, - {file = "pyreqwest_impersonate-0.4.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b27622d5183185dc63bcab9a7dd1de566688c63b844812b1d9366da7c459a494"}, - {file = "pyreqwest_impersonate-0.4.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b7bf13d49ef127e659ed134129336e94f7107023ed0138c81a46321b9a580428"}, - {file = "pyreqwest_impersonate-0.4.7-cp310-none-win_amd64.whl", hash = "sha256:0cba006b076b85a875814a4b5dd8cb27f483ebeeb0de83984a3786060fe18e0d"}, - {file = "pyreqwest_impersonate-0.4.7-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:370a8cb7a92b15749cbbe3ce7a9f09d35aac7d2a74505eb447f45419ea8ef2ff"}, - {file = "pyreqwest_impersonate-0.4.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:33244ea10ccee08bac7a7ccdc3a8e6bef6e28f2466ed61de551fa24b76ee4b6a"}, - {file = "pyreqwest_impersonate-0.4.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dba24fb6db822cbd9cbac32539893cc19cc06dd1820e03536e685b9fd2a2ffdd"}, - {file = "pyreqwest_impersonate-0.4.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e001ed09fc364cc00578fd31c0ae44d543cf75daf06b2657c7a82dcd99336ce"}, - {file = "pyreqwest_impersonate-0.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:608525535f078e85114fcd4eeba0f0771ffc7093c29208e9c0a55147502723bf"}, - {file = "pyreqwest_impersonate-0.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:38daedba0fc997e29cbc25c684a42a04aed38bfbcf85d8f1ffe8f87314d5f72f"}, - {file = "pyreqwest_impersonate-0.4.7-cp311-none-win_amd64.whl", hash = "sha256:d21f3e93ee0aecdc43d2914800bdf23501bde858d70ac7c0b06168f85f95bf22"}, - {file = "pyreqwest_impersonate-0.4.7-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5caeee29370a06a322ea6951730d21ec3c641ce46417fd2b5805b283564f2fef"}, - {file = "pyreqwest_impersonate-0.4.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1c7aa4b428ed58370975d828a95eaf10561712e79a4e2eafca1746a4654a34a8"}, - {file = "pyreqwest_impersonate-0.4.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:887249adcab35487a44a5428ccab2a6363642785b36649a732d5e649df568b8e"}, - {file = "pyreqwest_impersonate-0.4.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60f932de8033c15323ba79a7470406ca8228e07aa60078dee5a18e89f0a9fc88"}, - {file = "pyreqwest_impersonate-0.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a2e6332fd6d78623a22f4e747688fe9e6005b61b6f208936d5428d2a65d34b39"}, - {file = "pyreqwest_impersonate-0.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:349b005eef323195685ba5cb2b6f302da0db481e59f03696ef57099f232f0c1f"}, - {file = "pyreqwest_impersonate-0.4.7-cp312-none-win_amd64.whl", hash = "sha256:5620025ac138a10c46a9b14c91b6f58114d50063ff865a2d02ad632751b67b29"}, - {file = "pyreqwest_impersonate-0.4.7-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ebf954e09b3dc800a7576c7bde9827b00064531364c7817356c7cc58eb4b46b2"}, - {file = "pyreqwest_impersonate-0.4.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:112d9561f136548bd67d31cadb6b78d4c31751e526e62e09c6e581c2f1711455"}, - {file = "pyreqwest_impersonate-0.4.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05213f5f014ecc6732d859a0f51b3dff0424748cc6e2d0d9a42aa1f7108b4eaa"}, - {file = "pyreqwest_impersonate-0.4.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10fa70529a60fc043650ce03481fab7714e7519c3b06f5e81c95206b8b60aec6"}, - {file = "pyreqwest_impersonate-0.4.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5b1288881eada1891db7e862c69b673fb159834a41f823b9b00fc52d0f096ccc"}, - {file = "pyreqwest_impersonate-0.4.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:57ca562229c40615074f36e7f1ae5e57b8164f604eddb042132467c3a00fc2c5"}, - {file = "pyreqwest_impersonate-0.4.7-cp38-none-win_amd64.whl", hash = "sha256:c098ef1333511ea9a43be9a818fcc0866bd2caa63cdc9cf4ab48450ace675646"}, - {file = "pyreqwest_impersonate-0.4.7-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:39d961330190bf2d59983ad16dafb4b42d5adcdfe7531ad099c8f3ab53f8d906"}, - {file = 
"pyreqwest_impersonate-0.4.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0d793591784b89953422b1efaa17460f57f6116de25b3e3065d9fa6cf220ef18"}, - {file = "pyreqwest_impersonate-0.4.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:945116bb9ffb7e45a87e313f47de28c4da889b14bda620aebc5ba9c3600425cf"}, - {file = "pyreqwest_impersonate-0.4.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b96a0955c49f346786ee997c755561fecf33b7886cecef861fe4db15c7b23ad3"}, - {file = "pyreqwest_impersonate-0.4.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ed997197f907ccce9b86a75163b5e78743bc469d2ddcf8a22d4d90c2595573cb"}, - {file = "pyreqwest_impersonate-0.4.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1f54788f6fb0ee8b31c1eaadba81fb003efb406a768844e2a1a50b855f4806bf"}, - {file = "pyreqwest_impersonate-0.4.7-cp39-none-win_amd64.whl", hash = "sha256:0a679e81b0175dcc670a5ed47a5c184d7031ce16b5c58bf6b2c650ab9f2496c8"}, - {file = "pyreqwest_impersonate-0.4.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bddb07e04e4006a2184608c44154983fdfa0ce2e230b0a7cec81cd4ba88dd07"}, - {file = "pyreqwest_impersonate-0.4.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:780c53bfd2fbda151081165733fba5d5b1e17dd61999360110820942e351d011"}, - {file = "pyreqwest_impersonate-0.4.7-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4bfa8ea763e6935e7660f8e885f1b00713b0d22f79a526c6ae6932b1856d1343"}, - {file = "pyreqwest_impersonate-0.4.7-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:96b23b0688a63cbd6c39237461baa95162a69a15e9533789163aabcaf3f572fb"}, - {file = "pyreqwest_impersonate-0.4.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b0eb56a8ad9d48952c613903d3ef6d8762d48dcec9807a509fee2a43e94ccac"}, - {file = "pyreqwest_impersonate-0.4.7-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9330176494e260521ea0eaae349ca06128dc527400248c57b378597c470d335c"}, - {file = "pyreqwest_impersonate-0.4.7-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:6343bc3392781ff470e5dc47fea9f77bb61d8831b07e901900d31c46decec5d1"}, - {file = "pyreqwest_impersonate-0.4.7-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ecd598e16020a165029647ca80078311bf079e8317bf61c1b2fa824b8967e0db"}, - {file = "pyreqwest_impersonate-0.4.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a38f3014ac31b08f5fb1ef4e1eb6c6e810f51f6cb815d0066ab3f34ec0f82d98"}, - {file = "pyreqwest_impersonate-0.4.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db76a97068e5145f5b348037e09a91b2bed9c8eab92e79a3297b1306429fa839"}, - {file = "pyreqwest_impersonate-0.4.7-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1596a8ef8f20bbfe606a90ad524946747846611c8633cbdfbad0a4298b538218"}, - {file = "pyreqwest_impersonate-0.4.7-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:dcee18bc350b3d3a0455422c446f1f03f00eb762b3e470066e2bc4664fd7110d"}, - {file = "pyreqwest_impersonate-0.4.7.tar.gz", hash = "sha256:74ba7e6e4f4f753da4f71a7e5dc12625b296bd7d6ddd64093a1fbff14d8d5df7"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:45cad57afe4e6f56078ed9a7a90d0dc839d19d3e7a70175c80af21017f383bfb"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:1986600253baf38f25fd07b8bdc1903359c26e5d34beb7d7d084845554b5664d"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cca4e6e59b9ad0cd20bad6caed3ac96992cd9c1d3126ecdfcab2c0ac2b75376"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab6b32544491ee655264dab86fc8a58e47c4f87d196b28022d4007faf971a50"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:64bd6299e7fc888bb7f7292cf3e29504c406e5d5d04afd37ca994ab8142d8ee4"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e914b650dd953b8d9b24ef56aa4ecbfc16e399227b68accd818f8bf159e0c558"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-none-win_amd64.whl", hash = "sha256:cb56a2149b0c4548a8e0158b071a943f33dae9b717f92b5c9ac34ccd1f5a958c"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f62620e023490902feca0109f306e122e427feff7d59e03ecd22c69a89452367"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08d4c01d76da88cfe3d7d03b311b375ce3fb5a59130f93f0637bb755d6e56ff1"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6524e276bc460176c79d7ba4b9131d9db73c534586660371ebdf067749252a33"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22863bc0aaf02ca2f5d76c8130929ae680b7d82dfc1c28c1ed5f306ff626928"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8cc82d57f6a91037e64a7aa9122f909576ef2a141a42ce599958ef9f8c4bc033"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:da8a053308210e44fd8349f07f45442a0691ac932f2881e98b05cf9ac404b091"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-none-win_amd64.whl", hash = "sha256:4baf3916c14364a815a64ead7f728afb61b37541933b2771f18dbb245029bb55"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:78db05deed0b32c9c75f2b3168a3a9b7d5e36487b218cb839bfe7e2a143450cb"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9af9446d605903c2b4e94621a9093f8d8a403729bc9cbfbcb62929f8238c838f"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c55890181d8d81e66cac25a95e215dc9680645d01e9091b64449d5407ad9bc6"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e69344e7ae9964502a8693da7ad77ebc3e1418ee197e2e394bc23c5d4970772a"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b5db5c957a10d8cc2815085ba0b8fe09245b2f94c2225d9653a854a03b4217e1"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:03c19c21f63f9c91c590c4bbcc32cc2d8066b508c683a1d163b8c7d9816a01d5"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-none-win_amd64.whl", hash = "sha256:0230610779129f74ff802c744643ce7589b1d07cba21d046fe3b574281c29581"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b8cb9471ab4b2fa7e80d3ac4e580249ff988d782f2938ad1f0428433652b170d"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8081a5ace2658be91519902bde9ddc5f94e1f850a39be196007a25e3da5bbfdc"}, + {file 
= "pyreqwest_impersonate-0.4.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69eababfa3200459276acd780a0f3eaf41d1fe7c02bd169e714cba422055b5b9"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:632957fa671ebb841166e40913015de457225cb73600ef250c436c280e68bf45"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2ce7ddef334b4e5c68f5ea1da1d65f686b8d84f4443059d128e0f069d3fa499a"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6ce333d450b158d582e36317089a006440b4e66739a8e8849d170e4cb15e8c8d"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-none-win_amd64.whl", hash = "sha256:9d9c85ce19db92362854f534807e470f03e905f283a7de6826dc79b790a8788e"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2503277f2a95a30e28e498570e2ed03ef4302f873054e8e21d6c0e607cbbc1d1"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8260395ef4ddae325e8b30cef0391adde7bd35e1a1decf8c729e26391f09b52d"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d8066b46d82bbaff5402d767e2f13d3449b8191c37bf8283e91d301a7159869"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9c42f6343cfbd6663fb53edc9eb9feb4ebf6186b284e22368adc1eeb6a33854"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ff534f491a059e74fb7f994876df86078b4b125dbecc53c098a298ecd55fa9c6"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5b8fbf73b3ac513ddadafd338d61f79cd2370f0691d9175b2b92a45920920d6b"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-none-win_amd64.whl", hash = "sha256:a26447c82665d0e361207c1a15e56b0ca54974aa6c1fdfa18c68f908dec78cbe"}, + {file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24a16b8d55309f0af0db9d04ff442b0c91afccf078a94809e7c3a71747a5c214"}, + {file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c8fada56465fc19179404cc9d5d5e1064f5dfe27405cb052f57a5b4fe06aed1"}, + {file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a3d48d5abc146fd804395713427d944757a99254350e6a651e7d776818074aee"}, + {file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:475829fe9994c66258157a8d4adb1c038f44f79f901208ba656d547842337227"}, + {file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef1ec0e97623bc0e18469418cc4dd2c59a2d5fddcae944de61e13c0b46f910e"}, + {file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91857b196de89e9b36d3f8629aa8772c0bbe7efef8334fe266956b1c192ec31c"}, + {file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:63831e407487b8a21bb51f97cd86a616c291d5138f8caec16ab6019cf6423935"}, + {file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c30e61de93bcd0a9d3ca226b1ae5475002afde61e9d85018a6a4a040eeb86567"}, + {file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e6c72c37b03bce9900f5dbb4f476af17253ec60c13bf7a7259f71a8dc1b036cb"}, + {file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f1096165741b5c2178ab15b0eb09b5de16dd39b1cc135767d72471f0a69ce"}, + {file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:70c940c0e4ef335e22a6c705b01f286ee44780b5909065d212d94d82ea2580cb"}, + {file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:81c06f21757602d85f16dbc1cbaee1121cd65455f65aed4c048b7dcda7be85c4"}, + {file = "pyreqwest_impersonate-0.4.8.tar.gz", hash = "sha256:1eba11d47bd17244c64fec1502cc26ee66cc5c8a3be131e408101ae2b455e5bc"}, ] [package.extras] @@ -6625,13 +6725,13 @@ test = ["coveralls", "pycodestyle", "pyflakes", "pylint", "pytest", "pytest-benc [[package]] name = "redis" -version = "5.0.5" +version = "5.0.6" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.7" files = [ - {file = "redis-5.0.5-py3-none-any.whl", hash = "sha256:30b47d4ebb6b7a0b9b40c1275a19b87bb6f46b3bed82a89012cf56dea4024ada"}, - {file = "redis-5.0.5.tar.gz", hash = "sha256:3417688621acf6ee368dec4a04dd95881be24efd34c79f00d31f62bb528800ae"}, + {file = "redis-5.0.6-py3-none-any.whl", hash = "sha256:c0d6d990850c627bbf7be01c5c4cbaadf67b48593e913bb71c9819c30df37eee"}, + {file = "redis-5.0.6.tar.gz", hash = "sha256:38473cd7c6389ad3e44a91f4c3eaf6bcb8a9f746007f29bf4fb20824ff0b2197"}, ] [package.dependencies] @@ -6851,28 +6951,28 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruff" -version = "0.4.8" +version = "0.4.9" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.4.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7663a6d78f6adb0eab270fa9cf1ff2d28618ca3a652b60f2a234d92b9ec89066"}, - {file = "ruff-0.4.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eeceb78da8afb6de0ddada93112869852d04f1cd0f6b80fe464fd4e35c330913"}, - {file = "ruff-0.4.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aad360893e92486662ef3be0a339c5ca3c1b109e0134fcd37d534d4be9fb8de3"}, - {file = "ruff-0.4.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:284c2e3f3396fb05f5f803c9fffb53ebbe09a3ebe7dda2929ed8d73ded736deb"}, - {file = "ruff-0.4.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7354f921e3fbe04d2a62d46707e569f9315e1a613307f7311a935743c51a764"}, - {file = "ruff-0.4.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:72584676164e15a68a15778fd1b17c28a519e7a0622161eb2debdcdabdc71883"}, - {file = "ruff-0.4.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9678d5c9b43315f323af2233a04d747409d1e3aa6789620083a82d1066a35199"}, - {file = "ruff-0.4.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704977a658131651a22b5ebeb28b717ef42ac6ee3b11e91dc87b633b5d83142b"}, - {file = "ruff-0.4.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d05f8d6f0c3cce5026cecd83b7a143dcad503045857bc49662f736437380ad45"}, - {file = "ruff-0.4.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6ea874950daca5697309d976c9afba830d3bf0ed66887481d6bca1673fc5b66a"}, - {file = "ruff-0.4.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fc95aac2943ddf360376be9aa3107c8cf9640083940a8c5bd824be692d2216dc"}, - {file = "ruff-0.4.8-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:384154a1c3f4bf537bac69f33720957ee49ac8d484bfc91720cc94172026ceed"}, - {file = "ruff-0.4.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e9d5ce97cacc99878aa0d084c626a15cd21e6b3d53fd6f9112b7fc485918e1fa"}, - {file = "ruff-0.4.8-py3-none-win32.whl", hash = "sha256:6d795d7639212c2dfd01991259460101c22aabf420d9b943f153ab9d9706e6a9"}, - {file = "ruff-0.4.8-py3-none-win_amd64.whl", hash = "sha256:e14a3a095d07560a9d6769a72f781d73259655919d9b396c650fc98a8157555d"}, - {file = "ruff-0.4.8-py3-none-win_arm64.whl", hash = "sha256:14019a06dbe29b608f6b7cbcec300e3170a8d86efaddb7b23405cb7f7dcaf780"}, - {file = "ruff-0.4.8.tar.gz", hash = "sha256:16d717b1d57b2e2fd68bd0bf80fb43931b79d05a7131aa477d66fc40fbd86268"}, + {file = "ruff-0.4.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b262ed08d036ebe162123170b35703aaf9daffecb698cd367a8d585157732991"}, + {file = "ruff-0.4.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:98ec2775fd2d856dc405635e5ee4ff177920f2141b8e2d9eb5bd6efd50e80317"}, + {file = "ruff-0.4.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4555056049d46d8a381f746680db1c46e67ac3b00d714606304077682832998e"}, + {file = "ruff-0.4.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e91175fbe48f8a2174c9aad70438fe9cb0a5732c4159b2a10a3565fea2d94cde"}, + {file = "ruff-0.4.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e8e7b95673f22e0efd3571fb5b0cf71a5eaaa3cc8a776584f3b2cc878e46bff"}, + {file = "ruff-0.4.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2d45ddc6d82e1190ea737341326ecbc9a61447ba331b0a8962869fcada758505"}, + {file = "ruff-0.4.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:78de3fdb95c4af084087628132336772b1c5044f6e710739d440fc0bccf4d321"}, + {file = "ruff-0.4.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:06b60f91bfa5514bb689b500a25ba48e897d18fea14dce14b48a0c40d1635893"}, + {file = "ruff-0.4.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88bffe9c6a454bf8529f9ab9091c99490578a593cc9f9822b7fc065ee0712a06"}, + {file = "ruff-0.4.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:673bddb893f21ab47a8334c8e0ea7fd6598ecc8e698da75bcd12a7b9d0a3206e"}, + {file = "ruff-0.4.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8c1aff58c31948cc66d0b22951aa19edb5af0a3af40c936340cd32a8b1ab7438"}, + {file = "ruff-0.4.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:784d3ec9bd6493c3b720a0b76f741e6c2d7d44f6b2be87f5eef1ae8cc1d54c84"}, + {file = "ruff-0.4.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:732dd550bfa5d85af8c3c6cbc47ba5b67c6aed8a89e2f011b908fc88f87649db"}, + {file = "ruff-0.4.9-py3-none-win32.whl", hash = "sha256:8064590fd1a50dcf4909c268b0e7c2498253273309ad3d97e4a752bb9df4f521"}, + {file = "ruff-0.4.9-py3-none-win_amd64.whl", hash = "sha256:e0a22c4157e53d006530c902107c7f550b9233e9706313ab57b892d7197d8e52"}, + {file = "ruff-0.4.9-py3-none-win_arm64.whl", hash = "sha256:5d5460f789ccf4efd43f265a58538a2c24dbce15dbf560676e430375f20a8198"}, + {file = "ruff-0.4.9.tar.gz", hash = "sha256:f1cb0828ac9533ba0135d148d214e284711ede33640465e706772645483427e3"}, ] [[package]] @@ -7148,18 +7248,18 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "70.0.0" +version = "70.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-70.0.0-py3-none-any.whl", hash = 
"sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, - {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, + {file = "setuptools-70.1.0-py3-none-any.whl", hash = "sha256:d9b8b771455a97c8a9f3ab3448ebe0b29b5e105f1228bba41028be116985a267"}, + {file = "setuptools-70.1.0.tar.gz", hash = "sha256:01a1e793faa5bd89abc851fa15d0a0db26f160890c7102cd8dce643e886b47f5"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "sgmllib3k" @@ -7302,64 +7402,64 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.30" +version = "2.0.31" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3b48154678e76445c7ded1896715ce05319f74b1e73cf82d4f8b59b46e9c0ddc"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2753743c2afd061bb95a61a51bbb6a1a11ac1c44292fad898f10c9839a7f75b2"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7bfc726d167f425d4c16269a9a10fe8630ff6d14b683d588044dcef2d0f6be7"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4f61ada6979223013d9ab83a3ed003ded6959eae37d0d685db2c147e9143797"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a365eda439b7a00732638f11072907c1bc8e351c7665e7e5da91b169af794af"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bba002a9447b291548e8d66fd8c96a6a7ed4f2def0bb155f4f0a1309fd2735d5"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-win32.whl", hash = "sha256:0138c5c16be3600923fa2169532205d18891b28afa817cb49b50e08f62198bb8"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-win_amd64.whl", hash = "sha256:99650e9f4cf3ad0d409fed3eec4f071fadd032e9a5edc7270cd646a26446feeb"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:955991a09f0992c68a499791a753523f50f71a6885531568404fa0f231832aa0"}, - {file = 
"SQLAlchemy-2.0.30-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f69e4c756ee2686767eb80f94c0125c8b0a0b87ede03eacc5c8ae3b54b99dc46"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69c9db1ce00e59e8dd09d7bae852a9add716efdc070a3e2068377e6ff0d6fdaa"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1429a4b0f709f19ff3b0cf13675b2b9bfa8a7e79990003207a011c0db880a13"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:efedba7e13aa9a6c8407c48facfdfa108a5a4128e35f4c68f20c3407e4376aa9"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16863e2b132b761891d6c49f0a0f70030e0bcac4fd208117f6b7e053e68668d0"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-win32.whl", hash = "sha256:2ecabd9ccaa6e914e3dbb2aa46b76dede7eadc8cbf1b8083c94d936bcd5ffb49"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-win_amd64.whl", hash = "sha256:0b3f4c438e37d22b83e640f825ef0f37b95db9aa2d68203f2c9549375d0b2260"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5a79d65395ac5e6b0c2890935bad892eabb911c4aa8e8015067ddb37eea3d56c"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9a5baf9267b752390252889f0c802ea13b52dfee5e369527da229189b8bd592e"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cb5a646930c5123f8461f6468901573f334c2c63c795b9af350063a736d0134"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:296230899df0b77dec4eb799bcea6fbe39a43707ce7bb166519c97b583cfcab3"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c62d401223f468eb4da32627bffc0c78ed516b03bb8a34a58be54d618b74d472"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3b69e934f0f2b677ec111b4d83f92dc1a3210a779f69bf905273192cf4ed433e"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-win32.whl", hash = "sha256:77d2edb1f54aff37e3318f611637171e8ec71472f1fdc7348b41dcb226f93d90"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-win_amd64.whl", hash = "sha256:b6c7ec2b1f4969fc19b65b7059ed00497e25f54069407a8701091beb69e591a5"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5a8e3b0a7e09e94be7510d1661339d6b52daf202ed2f5b1f9f48ea34ee6f2d57"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b60203c63e8f984df92035610c5fb76d941254cf5d19751faab7d33b21e5ddc0"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1dc3eabd8c0232ee8387fbe03e0a62220a6f089e278b1f0aaf5e2d6210741ad"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:40ad017c672c00b9b663fcfcd5f0864a0a97828e2ee7ab0c140dc84058d194cf"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e42203d8d20dc704604862977b1470a122e4892791fe3ed165f041e4bf447a1b"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-win32.whl", hash = "sha256:2a4f4da89c74435f2bc61878cd08f3646b699e7d2eba97144030d1be44e27584"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-win_amd64.whl", hash = "sha256:b6bf767d14b77f6a18b6982cbbf29d71bede087edae495d11ab358280f304d8e"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc0c53579650a891f9b83fa3cecd4e00218e071d0ba00c4890f5be0c34887ed3"}, - {file = 
"SQLAlchemy-2.0.30-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:311710f9a2ee235f1403537b10c7687214bb1f2b9ebb52702c5aa4a77f0b3af7"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:408f8b0e2c04677e9c93f40eef3ab22f550fecb3011b187f66a096395ff3d9fd"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37a4b4fb0dd4d2669070fb05b8b8824afd0af57587393015baee1cf9890242d9"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a943d297126c9230719c27fcbbeab57ecd5d15b0bd6bfd26e91bfcfe64220621"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a089e218654e740a41388893e090d2e2c22c29028c9d1353feb38638820bbeb"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-win32.whl", hash = "sha256:fa561138a64f949f3e889eb9ab8c58e1504ab351d6cf55259dc4c248eaa19da6"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-win_amd64.whl", hash = "sha256:7d74336c65705b986d12a7e337ba27ab2b9d819993851b140efdf029248e818e"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8c62fe2480dd61c532ccafdbce9b29dacc126fe8be0d9a927ca3e699b9491a"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2383146973a15435e4717f94c7509982770e3e54974c71f76500a0136f22810b"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8409de825f2c3b62ab15788635ccaec0c881c3f12a8af2b12ae4910a0a9aeef6"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0094c5dc698a5f78d3d1539853e8ecec02516b62b8223c970c86d44e7a80f6c7"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:edc16a50f5e1b7a06a2dcc1f2205b0b961074c123ed17ebda726f376a5ab0953"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f7703c2010355dd28f53deb644a05fc30f796bd8598b43f0ba678878780b6e4c"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-win32.whl", hash = "sha256:1f9a727312ff6ad5248a4367358e2cf7e625e98b1028b1d7ab7b806b7d757513"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-win_amd64.whl", hash = "sha256:a0ef36b28534f2a5771191be6edb44cc2673c7b2edf6deac6562400288664221"}, - {file = "SQLAlchemy-2.0.30-py3-none-any.whl", hash = "sha256:7108d569d3990c71e26a42f60474b4c02c8586c4681af5fd67e51a044fdea86a"}, - {file = "SQLAlchemy-2.0.30.tar.gz", hash = "sha256:2b1708916730f4830bc69d6f49d37f7698b5bd7530aca7f04f785f8849e95255"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} + {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2a213c1b699d3f5768a7272de720387ae0122f1becf0901ed6eaa1abd1baf6c"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9fea3d0884e82d1e33226935dac990b967bef21315cbcc894605db3441347443"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ad7f221d8a69d32d197e5968d798217a4feebe30144986af71ada8c548e9fa"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2bee229715b6366f86a95d497c347c22ddffa2c7c96143b59a2aa5cc9eebbc"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:cd5b94d4819c0c89280b7c6109c7b788a576084bf0a480ae17c227b0bc41e109"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:750900a471d39a7eeba57580b11983030517a1f512c2cb287d5ad0fcf3aebd58"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-win32.whl", hash = "sha256:7bd112be780928c7f493c1a192cd8c5fc2a2a7b52b790bc5a84203fb4381c6be"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:5a48ac4d359f058474fadc2115f78a5cdac9988d4f99eae44917f36aa1476327"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f68470edd70c3ac3b6cd5c2a22a8daf18415203ca1b036aaeb9b0fb6f54e8298"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e2c38c2a4c5c634fe6c3c58a789712719fa1bf9b9d6ff5ebfce9a9e5b89c1ca"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd15026f77420eb2b324dcb93551ad9c5f22fab2c150c286ef1dc1160f110203"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2196208432deebdfe3b22185d46b08f00ac9d7b01284e168c212919891289396"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:352b2770097f41bff6029b280c0e03b217c2dcaddc40726f8f53ed58d8a85da4"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56d51ae825d20d604583f82c9527d285e9e6d14f9a5516463d9705dab20c3740"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-win32.whl", hash = "sha256:6e2622844551945db81c26a02f27d94145b561f9d4b0c39ce7bfd2fda5776dac"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-win_amd64.whl", hash = "sha256:ccaf1b0c90435b6e430f5dd30a5aede4764942a695552eb3a4ab74ed63c5b8d3"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3b74570d99126992d4b0f91fb87c586a574a5872651185de8297c6f90055ae42"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f77c4f042ad493cb8595e2f503c7a4fe44cd7bd59c7582fd6d78d7e7b8ec52c"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd1591329333daf94467e699e11015d9c944f44c94d2091f4ac493ced0119449"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74afabeeff415e35525bf7a4ecdab015f00e06456166a2eba7590e49f8db940e"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b9c01990d9015df2c6f818aa8f4297d42ee71c9502026bb074e713d496e26b67"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66f63278db425838b3c2b1c596654b31939427016ba030e951b292e32b99553e"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-win32.whl", hash = "sha256:0b0f658414ee4e4b8cbcd4a9bb0fd743c5eeb81fc858ca517217a8013d282c96"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-win_amd64.whl", hash = "sha256:fa4b1af3e619b5b0b435e333f3967612db06351217c58bfb50cee5f003db2a5a"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f43e93057cf52a227eda401251c72b6fbe4756f35fa6bfebb5d73b86881e59b0"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d337bf94052856d1b330d5fcad44582a30c532a2463776e1651bd3294ee7e58b"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c06fb43a51ccdff3b4006aafee9fcf15f63f23c580675f7734245ceb6b6a9e05"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = 
"sha256:b6e22630e89f0e8c12332b2b4c282cb01cf4da0d26795b7eae16702a608e7ca1"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:79a40771363c5e9f3a77f0e28b3302801db08040928146e6808b5b7a40749c88"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-win32.whl", hash = "sha256:501ff052229cb79dd4c49c402f6cb03b5a40ae4771efc8bb2bfac9f6c3d3508f"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-win_amd64.whl", hash = "sha256:597fec37c382a5442ffd471f66ce12d07d91b281fd474289356b1a0041bdf31d"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dc6d69f8829712a4fd799d2ac8d79bdeff651c2301b081fd5d3fe697bd5b4ab9"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23b9fbb2f5dd9e630db70fbe47d963c7779e9c81830869bd7d137c2dc1ad05fb"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21c97efcbb9f255d5c12a96ae14da873233597dfd00a3a0c4ce5b3e5e79704"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26a6a9837589c42b16693cf7bf836f5d42218f44d198f9343dd71d3164ceeeac"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc251477eae03c20fae8db9c1c23ea2ebc47331bcd73927cdcaecd02af98d3c3"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2fd17e3bb8058359fa61248c52c7b09a97cf3c820e54207a50af529876451808"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-win32.whl", hash = "sha256:c76c81c52e1e08f12f4b6a07af2b96b9b15ea67ccdd40ae17019f1c373faa227"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-win_amd64.whl", hash = "sha256:4b600e9a212ed59355813becbcf282cfda5c93678e15c25a0ef896b354423238"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b6cf796d9fcc9b37011d3f9936189b3c8074a02a4ed0c0fbbc126772c31a6d4"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78fe11dbe37d92667c2c6e74379f75746dc947ee505555a0197cfba9a6d4f1a4"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc47dc6185a83c8100b37acda27658fe4dbd33b7d5e7324111f6521008ab4fe"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a41514c1a779e2aa9a19f67aaadeb5cbddf0b2b508843fcd7bafdf4c6864005"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:afb6dde6c11ea4525318e279cd93c8734b795ac8bb5dda0eedd9ebaca7fa23f1"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3f9faef422cfbb8fd53716cd14ba95e2ef655400235c3dfad1b5f467ba179c8c"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-win32.whl", hash = "sha256:fc6b14e8602f59c6ba893980bea96571dd0ed83d8ebb9c4479d9ed5425d562e9"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-win_amd64.whl", hash = "sha256:3cb8a66b167b033ec72c3812ffc8441d4e9f5f78f5e31e54dcd4c90a4ca5bebc"}, + {file = "SQLAlchemy-2.0.31-py3-none-any.whl", hash = "sha256:69f3e3c08867a8e4856e92d7afb618b95cdee18e0bc1647b77599722c9a28911"}, + {file = "SQLAlchemy-2.0.31.tar.gz", hash = "sha256:b607489dd4a54de56984a0c7656247504bd5523d9d0ba799aef59d4add009484"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} typing-extensions = ">=4.6.0" 
[package.extras] @@ -7479,13 +7579,13 @@ test = ["pytest", "tornado (>=4.5)", "typeguard"] [[package]] name = "tencentcloud-sdk-python-common" -version = "3.0.1166" +version = "3.0.1172" description = "Tencent Cloud Common SDK for Python" optional = false python-versions = "*" files = [ - {file = "tencentcloud-sdk-python-common-3.0.1166.tar.gz", hash = "sha256:7e20a98f94cd82302f4f9a6c28cd1d1d90e1043767a9ff98eebe10def84ec7b9"}, - {file = "tencentcloud_sdk_python_common-3.0.1166-py2.py3-none-any.whl", hash = "sha256:e230159b275427c0ff95bd708df2ad625ab4a45ff495d9a89d4199d535ce68e9"}, + {file = "tencentcloud-sdk-python-common-3.0.1172.tar.gz", hash = "sha256:37b3b9f4a53caa070379afb6910ac989823eacd35169701405ddafb12ea14e9e"}, + {file = "tencentcloud_sdk_python_common-3.0.1172-py2.py3-none-any.whl", hash = "sha256:8915ddc713bcd7512e9d528ec36ad3e527990ab06f5e89f63941f2e5c23f4675"}, ] [package.dependencies] @@ -7493,17 +7593,17 @@ requests = ">=2.16.0" [[package]] name = "tencentcloud-sdk-python-hunyuan" -version = "3.0.1166" +version = "3.0.1172" description = "Tencent Cloud Hunyuan SDK for Python" optional = false python-versions = "*" files = [ - {file = "tencentcloud-sdk-python-hunyuan-3.0.1166.tar.gz", hash = "sha256:9be5f6ca91facdc40da91a0b9c300a0c54a83cf3792305d0e83c4216ca2a2e18"}, - {file = "tencentcloud_sdk_python_hunyuan-3.0.1166-py2.py3-none-any.whl", hash = "sha256:572d41d034a68a898ac74dd4d92f6b764cdb2b993cf71e6fbc52a40e65b0b4b4"}, + {file = "tencentcloud-sdk-python-hunyuan-3.0.1172.tar.gz", hash = "sha256:ae83b39c9da7302b10c4bffb7672ae95be72945b43e06a0b1ae9ac23bac2d43b"}, + {file = "tencentcloud_sdk_python_hunyuan-3.0.1172-py2.py3-none-any.whl", hash = "sha256:443908059ef1a00a798b7387f85e210d89c65b4f9db73629e53b3ec609b8528b"}, ] [package.dependencies] -tencentcloud-sdk-python-common = "3.0.1166" +tencentcloud-sdk-python-common = "3.0.1172" [[package]] name = "threadpoolctl" @@ -8940,4 +9040,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "bd9e1ffff43e89fd985ff015daf9a4036fc80f2b78e135e9237fd73408c51a5a" +content-hash = "cac196b2ddb59d7873fb3380d87b622d002613d6dc1d271a5c15e46817a38c55" diff --git a/api/pyproject.toml b/api/pyproject.toml index 8915251be3358d..249113ddb9c1d7 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -145,7 +145,7 @@ xinference-client = "0.9.4" safetensors = "~0.4.3" zhipuai = "1.0.7" werkzeug = "~3.0.1" -pymilvus = "~2.4.3" +pymilvus = "2.3.1" qdrant-client = "1.7.3" cohere = "~5.2.4" pyyaml = "~6.0.1" @@ -187,6 +187,7 @@ tenacity = "~8.3.0" cos-python-sdk-v5 = "1.9.30" novita-client = "^0.5.6" opensearch-py = "2.4.0" +oracledb = "~2.2.1" [tool.poetry.group.dev] optional = true diff --git a/api/requirements-dev.txt b/api/requirements-dev.txt deleted file mode 100644 index 0391ac5969bcab..00000000000000 --- a/api/requirements-dev.txt +++ /dev/null @@ -1,5 +0,0 @@ -coverage~=7.2.4 -pytest~=8.1.1 -pytest-benchmark~=4.0.0 -pytest-env~=1.1.3 -pytest-mock~=3.14.0 diff --git a/api/requirements.txt b/api/requirements.txt index f9371c57035956..e69de29bb2d1d6 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -1,95 +0,0 @@ -beautifulsoup4==4.12.2 -flask~=3.0.1 -Flask-SQLAlchemy~=3.0.5 -SQLAlchemy~=2.0.29 -Flask-Compress~=1.14 -flask-login~=0.6.3 -flask-migrate~=4.0.5 -flask-restful~=0.3.10 -flask-cors~=4.0.0 -gunicorn~=22.0.0 -gevent~=23.9.1 -openai~=1.29.0 -tiktoken~=0.7.0 -psycopg2-binary~=2.9.6 -pycryptodome==3.19.1 -python-dotenv==1.0.0 -Authlib==1.3.1 
-boto3==1.34.123 -cachetools~=5.3.0 -weaviate-client~=3.21.0 -mailchimp-transactional~=1.0.50 -scikit-learn==1.2.2 -sentry-sdk[flask]~=1.39.2 -sympy==1.12 -jieba==0.42.1 -celery~=5.3.6 -redis[hiredis]~=5.0.3 -chardet~=5.1.0 -python-docx~=1.1.0 -pypdfium2~=4.17.0 -resend~=0.7.0 -pyjwt~=2.8.0 -anthropic~=0.23.1 -newspaper3k==0.2.8 -wikipedia==1.4.0 -readabilipy==0.2.0 -google-ai-generativelanguage==0.6.1 -google-api-core==2.18.0 -google-api-python-client==2.90.0 -google-auth==2.29.0 -google-auth-httplib2==0.2.0 -google-generativeai==0.5.0 -googleapis-common-protos==1.63.0 -google-cloud-storage==2.16.0 -replicate~=0.22.0 -websocket-client~=1.7.0 -dashscope[tokenizer]~=1.17.0 -huggingface_hub~=0.16.4 -transformers~=4.35.0 -tokenizers~=0.15.0 -pandas[performance,excel]~=2.2.2 -xinference-client==0.9.4 -safetensors~=0.4.3 -zhipuai==1.0.7 -werkzeug~=3.0.1 -pymilvus~=2.4.3 -qdrant-client==1.7.3 -cohere~=5.2.4 -pyyaml~=6.0.1 -numpy~=1.26.4 -unstructured[docx,pptx,msg,md,ppt,epub]~=0.10.27 -bs4~=0.0.1 -markdown~=3.5.1 -httpx[socks]~=0.27.0 -matplotlib~=3.8.2 -yfinance~=0.2.40 -pydub~=0.25.1 -gmpy2~=2.1.5 -numexpr~=2.9.0 -duckduckgo-search~=6.1.5 -arxiv==2.1.0 -yarl~=1.9.4 -twilio~=9.0.4 -qrcode~=7.4.2 -azure-storage-blob==12.13.0 -azure-identity==1.16.1 -lxml==5.1.0 -pydantic~=2.7.4 -pydantic_extra_types~=2.8.1 -pydantic-settings~=2.3.3 -pgvecto-rs==0.1.4 -tcvectordb==1.3.2 -firecrawl-py==0.0.5 -oss2==2.18.5 -pgvector==0.2.5 -pymysql==1.1.1 -tidb-vector==0.0.9 -google-cloud-aiplatform==1.49.0 -vanna[postgres,mysql,clickhouse,duckdb]==0.5.5 -tencentcloud-sdk-python-hunyuan~=3.0.1158 -chromadb~=0.5.1 -novita_client~=0.5.6 -tenacity~=8.3.0 -opensearch-py==2.4.0 -cos-python-sdk-v5==1.9.30 \ No newline at end of file diff --git a/api/services/__init__.py b/api/services/__init__.py index 20e68ab6d94cf2..6891436314b299 100644 --- a/api/services/__init__.py +++ b/api/services/__init__.py @@ -1 +1,3 @@ -import services.errors +from . 
import errors + +__all__ = ['errors'] diff --git a/api/services/account_service.py b/api/services/account_service.py index 7551c9cb4bb767..2c401aad911791 100644 --- a/api/services/account_service.py +++ b/api/services/account_service.py @@ -13,7 +13,6 @@ from constants.languages import language_timezone_mapping, languages from events.tenant_event import tenant_was_created from extensions.ext_redis import redis_client -from libs.helper import get_remote_ip from libs.passport import PassportService from libs.password import compare_password, hash_password, valid_password from libs.rsa import generate_key_pair @@ -67,10 +66,10 @@ def load_user(user_id: str) -> Account: @staticmethod - def get_account_jwt_token(account): + def get_account_jwt_token(account, *, exp: timedelta = timedelta(days=30)): payload = { "user_id": account.id, - "exp": datetime.now(timezone.utc).replace(tzinfo=None) + timedelta(days=30), + "exp": datetime.now(timezone.utc).replace(tzinfo=None) + exp, "iss": current_app.config['EDITION'], "sub": 'Console API Passport', } @@ -195,14 +194,35 @@ def update_account(account, **kwargs): return account @staticmethod - def update_last_login(account: Account, request) -> None: + def update_last_login(account: Account, *, ip_address: str) -> None: """Update last login time and ip""" account.last_login_at = datetime.now(timezone.utc).replace(tzinfo=None) - account.last_login_ip = get_remote_ip(request) + account.last_login_ip = ip_address db.session.add(account) db.session.commit() logging.info(f'Account {account.id} logged in successfully.') + @staticmethod + def login(account: Account, *, ip_address: Optional[str] = None): + if ip_address: + AccountService.update_last_login(account, ip_address=ip_address) + exp = timedelta(days=30) + token = AccountService.get_account_jwt_token(account, exp=exp) + redis_client.set(_get_login_cache_key(account_id=account.id, token=token), '1', ex=int(exp.total_seconds())) + return token + + @staticmethod + def logout(*, account: Account, token: str): + redis_client.delete(_get_login_cache_key(account_id=account.id, token=token)) + + @staticmethod + def load_logged_in_account(*, account_id: str, token: str): + if not redis_client.get(_get_login_cache_key(account_id=account_id, token=token)): + return None + return AccountService.load_user(account_id) + +def _get_login_cache_key(*, account_id: str, token: str): + return f"account_login:{account_id}:{token}" class TenantService: diff --git a/api/services/errors/__init__.py b/api/services/errors/__init__.py index 493919d373bb17..bb5711145c0a44 100644 --- a/api/services/errors/__init__.py +++ b/api/services/errors/__init__.py @@ -1,6 +1,29 @@ +from . import ( + account, + app, + app_model_config, + audio, + base, + completion, + conversation, + dataset, + document, + file, + index, + message, +) + __all__ = [ - 'base', 'conversation', 'message', 'index', 'app_model_config', 'account', 'document', 'dataset', - 'app', 'completion', 'audio', 'file' + "base", + "conversation", + "message", + "index", + "app_model_config", + "account", + "document", + "dataset", + "app", + "completion", + "audio", + "file", ] - -from . 
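The AccountService changes above bind every issued JWT to a Redis entry keyed as account_login:<account_id>:<token>, so a session can be revoked server-side before the 30-day expiry runs out. Below is a minimal sketch of that round trip, with an in-memory stand-in for redis_client and a plain string in place of the real PassportService token; only the cache-key layout and the expiry handling mirror the diff, everything else is illustrative.

from datetime import timedelta


class FakeRedis:
    """In-memory stand-in for redis_client; only the calls used by the login flow."""

    def __init__(self):
        self._store = {}

    def set(self, key, value, ex=None):
        self._store[key] = value  # the TTL is ignored in this sketch

    def get(self, key):
        return self._store.get(key)

    def delete(self, key):
        self._store.pop(key, None)


redis_client = FakeRedis()


def _get_login_cache_key(*, account_id: str, token: str) -> str:
    # Same key layout as the diff: account_login:<account_id>:<token>
    return f"account_login:{account_id}:{token}"


def login(account_id: str, token: str, exp: timedelta = timedelta(days=30)) -> str:
    # Mark the token as an active session for its whole lifetime.
    key = _get_login_cache_key(account_id=account_id, token=token)
    redis_client.set(key, "1", ex=int(exp.total_seconds()))
    return token


def is_session_active(account_id: str, token: str) -> bool:
    # Mirrors the check inside load_logged_in_account: no cache entry, no session.
    return redis_client.get(_get_login_cache_key(account_id=account_id, token=token)) is not None


def logout(account_id: str, token: str) -> None:
    redis_client.delete(_get_login_cache_key(account_id=account_id, token=token))


token = login("acc-1", "jwt-abc")
assert is_session_active("acc-1", token)
logout("acc-1", token)
assert not is_session_active("acc-1", token)

Logging out simply deletes the cache entry, which is why load_logged_in_account above rejects a token whose key is missing even if the JWT itself has not yet expired.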
import * diff --git a/api/tests/integration_tests/model_runtime/azure_openai/test_llm.py b/api/tests/integration_tests/model_runtime/azure_openai/test_llm.py index e17d0acf99fd4b..6afec540ade181 100644 --- a/api/tests/integration_tests/model_runtime/azure_openai/test_llm.py +++ b/api/tests/integration_tests/model_runtime/azure_openai/test_llm.py @@ -156,11 +156,6 @@ def test_invoke_chat_model(setup_openai_mock): assert isinstance(result, LLMResult) assert len(result.message.content) > 0 - for chunk in model._llm_result_to_stream(result): - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True @pytest.mark.parametrize('setup_openai_mock', [['chat']], indirect=True) def test_invoke_stream_chat_model(setup_openai_mock): diff --git a/api/tests/integration_tests/model_runtime/cohere/test_llm.py b/api/tests/integration_tests/model_runtime/cohere/test_llm.py index 499e6289bcdf62..5ce4f8ecfe874f 100644 --- a/api/tests/integration_tests/model_runtime/cohere/test_llm.py +++ b/api/tests/integration_tests/model_runtime/cohere/test_llm.py @@ -136,12 +136,6 @@ def test_invoke_chat_model(): assert isinstance(result, LLMResult) assert len(result.message.content) > 0 - for chunk in model._llm_result_to_stream(result): - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - def test_invoke_stream_chat_model(): model = CohereLargeLanguageModel() diff --git a/api/tests/integration_tests/model_runtime/openai/test_llm.py b/api/tests/integration_tests/model_runtime/openai/test_llm.py index 0da4dbb49d1ea0..bf4ac53579fb6e 100644 --- a/api/tests/integration_tests/model_runtime/openai/test_llm.py +++ b/api/tests/integration_tests/model_runtime/openai/test_llm.py @@ -156,12 +156,6 @@ def test_invoke_chat_model(setup_openai_mock): assert isinstance(result, LLMResult) assert len(result.message.content) > 0 - for chunk in model._llm_result_to_stream(result): - assert isinstance(chunk, LLMResultChunk) - assert isinstance(chunk.delta, LLMResultChunkDelta) - assert isinstance(chunk.delta.message, AssistantPromptMessage) - assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True - @pytest.mark.parametrize('setup_openai_mock', [['chat']], indirect=True) def test_invoke_chat_model_with_vision(setup_openai_mock): model = OpenAILargeLanguageModel() diff --git a/api/tests/integration_tests/model_runtime/tongyi/test_response_format.py b/api/tests/integration_tests/model_runtime/tongyi/test_response_format.py new file mode 100644 index 00000000000000..1b0a38d5d15a00 --- /dev/null +++ b/api/tests/integration_tests/model_runtime/tongyi/test_response_format.py @@ -0,0 +1,84 @@ +import json +import os +from collections.abc import Generator + +from core.model_runtime.entities.llm_entities import LLMResultChunk, LLMResultChunkDelta +from core.model_runtime.entities.message_entities import AssistantPromptMessage, UserPromptMessage +from core.model_runtime.model_providers.tongyi.llm.llm import TongyiLargeLanguageModel + + +def test_invoke_model_with_json_response(): + """ + Test the invocation of a model with JSON response. 
+ """ + model_list = [ + "qwen-max-0403", + "qwen-max-1201", + "qwen-max-longcontext", + "qwen-max", + "qwen-plus-chat", + "qwen-plus", + "qwen-turbo-chat", + "qwen-turbo", + ] + for model_name in model_list: + print("testing model: ", model_name) + invoke_model_with_json_response(model_name) + + +def invoke_model_with_json_response(model_name="qwen-max-0403"): + """ + Method to invoke the model with JSON response format. + Args: + model_name (str): The name of the model to invoke. Defaults to "qwen-max-0403". + + Returns: + None + """ + model = TongyiLargeLanguageModel() + + response = model.invoke( + model=model_name, + credentials={ + 'dashscope_api_key': os.environ.get('TONGYI_DASHSCOPE_API_KEY') + }, + prompt_messages=[ + UserPromptMessage( + content='output json data with format `{"data": "test", "code": 200, "msg": "success"}' + ) + ], + model_parameters={ + 'temperature': 0.5, + 'max_tokens': 50, + 'response_format': 'JSON', + }, + stream=True, + user="abc-123" + ) + print("=====================================") + print(response) + assert isinstance(response, Generator) + output = "" + for chunk in response: + assert isinstance(chunk, LLMResultChunk) + assert isinstance(chunk.delta, LLMResultChunkDelta) + assert isinstance(chunk.delta.message, AssistantPromptMessage) + output += chunk.delta.message.content + assert is_json(output) + + +def is_json(s): + """ + Check if a string is a valid JSON. + + Args: + s (str): The string to check. + + Returns: + bool: True if the string is a valid JSON, False otherwise. + """ + try: + json.loads(s) + except ValueError: + return False + return True \ No newline at end of file diff --git a/api/tests/integration_tests/vdb/oracle/__init__.py b/api/tests/integration_tests/vdb/oracle/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/api/tests/integration_tests/vdb/oracle/test_oraclevector.py b/api/tests/integration_tests/vdb/oracle/test_oraclevector.py new file mode 100644 index 00000000000000..3252b0427609c6 --- /dev/null +++ b/api/tests/integration_tests/vdb/oracle/test_oraclevector.py @@ -0,0 +1,30 @@ +from core.rag.datasource.vdb.oracle.oraclevector import OracleVector, OracleVectorConfig +from core.rag.models.document import Document +from tests.integration_tests.vdb.test_vector_store import ( + AbstractVectorTest, + get_example_text, + setup_mock_redis, +) + + +class OracleVectorTest(AbstractVectorTest): + def __init__(self): + super().__init__() + self.vector = OracleVector( + collection_name=self.collection_name, + config=OracleVectorConfig( + host="localhost", + port=1521, + user="dify", + password="dify", + database="FREEPDB1", + ), + ) + + def search_by_full_text(self): + hits_by_full_text: list[Document] = self.vector.search_by_full_text(query=get_example_text()) + assert len(hits_by_full_text) == 0 + + +def test_oraclevector(setup_mock_redis): + OracleVectorTest().run_all_tests() diff --git a/api/tests/unit_tests/settings/test_dify_settings.py b/api/tests/unit_tests/configs/test_dify_config.py similarity index 58% rename from api/tests/unit_tests/settings/test_dify_settings.py rename to api/tests/unit_tests/configs/test_dify_config.py index b5400d5c2c049d..6a6fe35f66c7b3 100644 --- a/api/tests/unit_tests/settings/test_dify_settings.py +++ b/api/tests/unit_tests/configs/test_dify_config.py @@ -3,7 +3,7 @@ import pytest from flask import Flask -from configs.app_configs import DifyConfigs +from configs.app_config import DifyConfig EXAMPLE_ENV_FILENAME = '.env' @@ -19,32 +19,32 @@ def 
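The new Oracle integration test above exercises only what is visible in this diff: an OracleVectorConfig pointing at the dockerized database and a full-text query that is expected to come back empty, because the Oracle backend answers search_by_full_text with no hits. A sketch of the same setup outside the test harness, assuming the compose service added later in this diff is running locally; ingestion and vector search go through the shared AbstractVectorTest helpers and are not shown here.

from core.rag.datasource.vdb.oracle.oraclevector import OracleVector, OracleVectorConfig

# Connection details mirror docker-compose.oracle.yaml and create_user.sql (user dify on FREEPDB1).
store = OracleVector(
    collection_name="example_collection",
    config=OracleVectorConfig(
        host="localhost",
        port=1521,
        user="dify",
        password="dify",
        database="FREEPDB1",
    ),
)

# Full-text search is not implemented for this backend, so the test (and this sketch) expects zero hits.
hits = store.search_by_full_text(query="example query")
assert len(hits) == 0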
example_env_file(tmp_path, monkeypatch) -> str: return str(file_path) -def test_dify_configs_undefined_entry(example_env_file): +def test_dify_config_undefined_entry(example_env_file): # load dotenv file with pydantic-settings - settings = DifyConfigs(_env_file=example_env_file) + config = DifyConfig(_env_file=example_env_file) # entries not defined in app settings with pytest.raises(TypeError): # TypeError: 'AppSettings' object is not subscriptable - assert settings['LOG_LEVEL'] == 'INFO' + assert config['LOG_LEVEL'] == 'INFO' -def test_dify_configs(example_env_file): +def test_dify_config(example_env_file): # load dotenv file with pydantic-settings - settings = DifyConfigs(_env_file=example_env_file) + config = DifyConfig(_env_file=example_env_file) # constant values - assert settings.COMMIT_SHA == '' + assert config.COMMIT_SHA == '' # default values - assert settings.EDITION == 'SELF_HOSTED' - assert settings.API_COMPRESSION_ENABLED is False - assert settings.SENTRY_TRACES_SAMPLE_RATE == 1.0 + assert config.EDITION == 'SELF_HOSTED' + assert config.API_COMPRESSION_ENABLED is False + assert config.SENTRY_TRACES_SAMPLE_RATE == 1.0 def test_flask_configs(example_env_file): flask_app = Flask('app') - flask_app.config.from_mapping(DifyConfigs(_env_file=example_env_file).model_dump()) + flask_app.config.from_mapping(DifyConfig(_env_file=example_env_file).model_dump()) config = flask_app.config # configs read from dotenv directly @@ -60,3 +60,14 @@ def test_flask_configs(example_env_file): assert config['CONSOLE_API_URL'] == 'https://example.com' # fallback to alias choices value as CONSOLE_API_URL assert config['FILES_URL'] == 'https://example.com' + + assert config['SQLALCHEMY_DATABASE_URI'] == 'postgresql://postgres:@localhost:5432/dify' + assert config['SQLALCHEMY_ENGINE_OPTIONS'] == { + 'connect_args': { + 'options': '-c timezone=UTC', + }, + 'max_overflow': 10, + 'pool_pre_ping': False, + 'pool_recycle': 3600, + 'pool_size': 30, + } diff --git a/docker/docker-compose.milvus.yaml b/docker/docker-compose.milvus.yaml index b884307efef7fa..c422efbf4b0633 100644 --- a/docker/docker-compose.milvus.yaml +++ b/docker/docker-compose.milvus.yaml @@ -38,7 +38,7 @@ services: milvus-standalone: container_name: milvus-standalone - image: milvusdb/milvus:v2.4.4 + image: milvusdb/milvus:v2.3.1 command: ["milvus", "run", "standalone"] environment: ETCD_ENDPOINTS: etcd:2379 diff --git a/docker/docker-compose.oracle.yaml b/docker/docker-compose.oracle.yaml new file mode 100644 index 00000000000000..527bd7f577161f --- /dev/null +++ b/docker/docker-compose.oracle.yaml @@ -0,0 +1,18 @@ +version: '3' +services: + # oracle 23 ai vector store. 
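The renamed config test boils down to one pattern: DifyConfig is a pydantic-settings model, so values are read as attributes rather than subscripts, and the whole model is handed to Flask through model_dump(). A condensed sketch of that flow, assuming an .env file like the one the test writes; the defaults asserted below hold only when the env file does not override them.

from flask import Flask

from configs.app_config import DifyConfig

# Load settings from a dotenv file; entries that are not declared fields are not subscriptable.
config = DifyConfig(_env_file=".env")
assert config.EDITION == "SELF_HOSTED"          # default value
assert config.API_COMPRESSION_ENABLED is False  # default value

# Hand everything to Flask as a plain mapping.
app = Flask("app")
app.config.from_mapping(config.model_dump())
print(app.config["SQLALCHEMY_DATABASE_URI"])
print(app.config["SQLALCHEMY_ENGINE_OPTIONS"])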
+ oracle: + image: container-registry.oracle.com/database/free:latest + restart: always + ports: + - 1521:1521 + volumes: + - type: volume + source: oradata_vector + target: /opt/oracle/oradata + - ./startupscripts:/opt/oracle/scripts/startup + environment: + - ORACLE_PWD=Dify123456 + - ORACLE_CHARACTERSET=AL32UTF8 +volumes: + oradata_vector: diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 1d3af7e4133077..0e0f997c97776e 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -154,6 +154,12 @@ services: TIDB_VECTOR_USER: xxx.root TIDB_VECTOR_PASSWORD: xxxxxx TIDB_VECTOR_DATABASE: dify + # oracle configurations + ORACLE_HOST: oracle + ORACLE_PORT: 1521 + ORACLE_USER: dify + ORACLE_PASSWORD: dify + ORACLE_DATABASE: FREEPDB1 # Chroma configuration CHROMA_HOST: 127.0.0.1 CHROMA_PORT: 8000 @@ -350,6 +356,12 @@ services: TIDB_VECTOR_USER: xxx.root TIDB_VECTOR_PASSWORD: xxxxxx TIDB_VECTOR_DATABASE: dify + # oracle configurations + ORACLE_HOST: oracle + ORACLE_PORT: 1521 + ORACLE_USER: dify + ORACLE_PASSWORD: dify + ORACLE_DATABASE: FREEPDB1 # Chroma configuration CHROMA_HOST: 127.0.0.1 CHROMA_PORT: 8000 @@ -530,6 +542,22 @@ services: # timeout: 3s # retries: 30 + # The oracle vector database. + # Uncomment to use oracle23ai as vector store. Also need to Uncomment volumes block + # oracle: + # image: container-registry.oracle.com/database/free:latest + # restart: always + # ports: + # - 1521:1521 + # volumes: + # - type: volume + # source: oradata + # target: /opt/oracle/oradata + # - ./startupscripts:/opt/oracle/scripts/startup + # environment: + # - ORACLE_PWD=Dify123456 + # - ORACLE_CHARACTERSET=AL32UTF8 + # The nginx reverse proxy. # used for reverse proxying the API service and Web service. @@ -555,3 +583,6 @@ networks: ssrf_proxy_network: driver: bridge internal: true + +#volumes: +# oradata: diff --git a/docker/startupscripts/create_user.sql b/docker/startupscripts/create_user.sql new file mode 100755 index 00000000000000..b80e19c3b05a06 --- /dev/null +++ b/docker/startupscripts/create_user.sql @@ -0,0 +1,5 @@ +show pdbs; +ALTER SYSTEM SET PROCESSES=500 SCOPE=SPFILE; +alter session set container= freepdb1; +create user dify identified by dify DEFAULT TABLESPACE users quota unlimited on users; +grant DB_DEVELOPER_ROLE to dify; diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/layout.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/layout.tsx index 1aebec0b4f17a9..c51f7071f1e070 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/layout.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/layout.tsx @@ -4,6 +4,16 @@ import { useUnmount } from 'ahooks' import React, { useCallback, useEffect, useState } from 'react' import { usePathname, useRouter } from 'next/navigation' import cn from 'classnames' +import { + RiDashboard2Fill, + RiDashboard2Line, + RiFileList3Fill, + RiFileList3Line, + RiTerminalBoxFill, + RiTerminalBoxLine, + RiTerminalWindowFill, + RiTerminalWindowLine, +} from '@remixicon/react' import { useTranslation } from 'react-i18next' import { useShallow } from 'zustand/react/shallow' import s from './style.module.css' @@ -13,8 +23,6 @@ import type { NavIcon } from '@/app/components/app-sidebar/navLink' import { fetchAppDetail } from '@/service/apps' import { useAppContext } from '@/context/app-context' import Loading from '@/app/components/base/loading' -import { BarChartSquare02, FileHeart02, PromptEngineering, TerminalSquare } from 
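The compose changes above only thread ORACLE_* environment variables through to the API container; turning them into an actual connection is the job of the oracledb driver added in api/pyproject.toml. A minimal sketch of that connection with python-oracledb in thin mode, defaulting to the same values as docker-compose.yaml and create_user.sql; this is an illustration, not the code path Dify's vector store uses internally.

import os

import oracledb

# Defaults mirror create_user.sql (user dify on the FREEPDB1 pluggable database). Inside the compose
# network ORACLE_HOST is the service name "oracle"; from the host machine, localhost:1521 is published.
dsn = "{host}:{port}/{service}".format(
    host=os.environ.get("ORACLE_HOST", "localhost"),
    port=os.environ.get("ORACLE_PORT", "1521"),
    service=os.environ.get("ORACLE_DATABASE", "FREEPDB1"),
)

with oracledb.connect(
    user=os.environ.get("ORACLE_USER", "dify"),
    password=os.environ.get("ORACLE_PASSWORD", "dify"),
    dsn=dsn,
) as connection:
    with connection.cursor() as cursor:
        cursor.execute("select 1 from dual")
        print(cursor.fetchone())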
'@/app/components/base/icons/src/vender/line/development' -import { BarChartSquare02 as BarChartSquare02Solid, FileHeart02 as FileHeart02Solid, PromptEngineering as PromptEngineeringSolid, TerminalSquare as TerminalSquareSolid } from '@/app/components/base/icons/src/vender/solid/development' import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints' export type IAppDetailLayoutProps = { @@ -51,16 +59,16 @@ const AppDetailLayout: FC = (props) => { ? [{ name: t('common.appMenus.promptEng'), href: `/app/${appId}/${(mode === 'workflow' || mode === 'advanced-chat') ? 'workflow' : 'configuration'}`, - icon: PromptEngineering, - selectedIcon: PromptEngineeringSolid, + icon: RiTerminalWindowLine, + selectedIcon: RiTerminalWindowFill, }] : [] ), { name: t('common.appMenus.apiAccess'), href: `/app/${appId}/develop`, - icon: TerminalSquare, - selectedIcon: TerminalSquareSolid, + icon: RiTerminalBoxLine, + selectedIcon: RiTerminalBoxFill, }, ...(isCurrentWorkspaceManager ? [{ @@ -68,16 +76,16 @@ const AppDetailLayout: FC = (props) => { ? t('common.appMenus.logAndAnn') : t('common.appMenus.logs'), href: `/app/${appId}/logs`, - icon: FileHeart02, - selectedIcon: FileHeart02Solid, + icon: RiFileList3Line, + selectedIcon: RiFileList3Fill, }] : [] ), { name: t('common.appMenus.overview'), href: `/app/${appId}/overview`, - icon: BarChartSquare02, - selectedIcon: BarChartSquare02Solid, + icon: RiDashboard2Line, + selectedIcon: RiDashboard2Fill, }, ] return navs diff --git a/web/app/(commonLayout)/apps/AppCard.tsx b/web/app/(commonLayout)/apps/AppCard.tsx index c36090d476b0b3..f0007b7e4145ba 100644 --- a/web/app/(commonLayout)/apps/AppCard.tsx +++ b/web/app/(commonLayout)/apps/AppCard.tsx @@ -5,6 +5,7 @@ import { useRouter } from 'next/navigation' import { useCallback, useEffect, useState } from 'react' import { useTranslation } from 'react-i18next' import cn from 'classnames' +import { RiMoreFill } from '@remixicon/react' import s from './style.module.css' import type { App } from '@/types/app' import Confirm from '@/app/components/base/confirm' @@ -22,7 +23,6 @@ import { useProviderContext } from '@/context/provider-context' import { NEED_REFRESH_APP_LIST_KEY } from '@/config' import { AiText, ChatBot, CuteRobote } from '@/app/components/base/icons/src/vender/solid/communication' import { Route } from '@/app/components/base/icons/src/vender/solid/mapsAndTravel' -import { DotsHorizontal } from '@/app/components/base/icons/src/vender/line/general' import type { CreateAppModalProps } from '@/app/components/explore/create-app-modal' import EditAppModal from '@/app/components/explore/create-app-modal' import SwitchAppModal from '@/app/components/app/switch-app-modal' @@ -310,7 +310,7 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
- +
} btnClassName={open => diff --git a/web/app/(commonLayout)/apps/Apps.tsx b/web/app/(commonLayout)/apps/Apps.tsx index bf91d42fc8f3c9..a82ddd74b58957 100644 --- a/web/app/(commonLayout)/apps/Apps.tsx +++ b/web/app/(commonLayout)/apps/Apps.tsx @@ -4,6 +4,12 @@ import { useCallback, useEffect, useRef, useState } from 'react' import useSWRInfinite from 'swr/infinite' import { useTranslation } from 'react-i18next' import { useDebounceFn } from 'ahooks' +import { + RiApps2Line, + RiExchange2Line, + RiMessage3Line, + RiRobot3Line, +} from '@remixicon/react' import AppCard from './AppCard' import NewAppCard from './NewAppCard' import useAppsQueryState from './hooks/useAppsQueryState' @@ -14,12 +20,6 @@ import { NEED_REFRESH_APP_LIST_KEY } from '@/config' import { CheckModal } from '@/hooks/use-pay' import TabSliderNew from '@/app/components/base/tab-slider-new' import { useTabSearchParams } from '@/hooks/use-tab-searchparams' -import { DotsGrid } from '@/app/components/base/icons/src/vender/line/general' -import { - ChatBot, - CuteRobot, -} from '@/app/components/base/icons/src/vender/line/communication' -import { Route } from '@/app/components/base/icons/src/vender/line/mapsAndTravel' import SearchInput from '@/app/components/base/search-input' import { useStore as useTagStore } from '@/app/components/base/tag-management/store' import TagManagementModal from '@/app/components/base/tag-management' @@ -73,10 +73,10 @@ const Apps = () => { const anchorRef = useRef(null) const options = [ - { value: 'all', text: t('app.types.all'), icon: }, - { value: 'chat', text: t('app.types.chatbot'), icon: }, - { value: 'agent-chat', text: t('app.types.agent'), icon: }, - { value: 'workflow', text: t('app.types.workflow'), icon: }, + { value: 'all', text: t('app.types.all'), icon: }, + { value: 'chat', text: t('app.types.chatbot'), icon: }, + { value: 'agent-chat', text: t('app.types.agent'), icon: }, + { value: 'workflow', text: t('app.types.workflow'), icon: }, ] useEffect(() => { diff --git a/web/app/(commonLayout)/datasets/DatasetCard.tsx b/web/app/(commonLayout)/datasets/DatasetCard.tsx index 9fe0af4bba1852..8f132414249e60 100644 --- a/web/app/(commonLayout)/datasets/DatasetCard.tsx +++ b/web/app/(commonLayout)/datasets/DatasetCard.tsx @@ -5,6 +5,9 @@ import Link from 'next/link' import { useCallback, useEffect, useState } from 'react' import { useTranslation } from 'react-i18next' import cn from 'classnames' +import { + RiMoreFill, +} from '@remixicon/react' import Confirm from '@/app/components/base/confirm' import { ToastContext } from '@/app/components/base/toast' import { deleteDataset } from '@/service/datasets' @@ -14,7 +17,6 @@ import { Folder } from '@/app/components/base/icons/src/vender/solid/files' import type { HtmlContentProps } from '@/app/components/base/popover' import CustomPopover from '@/app/components/base/popover' import Divider from '@/app/components/base/divider' -import { DotsHorizontal } from '@/app/components/base/icons/src/vender/line/general' import RenameDatasetModal from '@/app/components/datasets/rename-modal' import type { Tag } from '@/app/components/base/tag-management/constant' import TagSelector from '@/app/components/base/tag-management/selector' @@ -167,7 +169,7 @@ const DatasetCard = ({
- +
} btnClassName={open => diff --git a/web/app/(commonLayout)/datasets/NewDatasetCard.tsx b/web/app/(commonLayout)/datasets/NewDatasetCard.tsx index f3e34ff7e22556..f76efa57696d38 100644 --- a/web/app/(commonLayout)/datasets/NewDatasetCard.tsx +++ b/web/app/(commonLayout)/datasets/NewDatasetCard.tsx @@ -2,7 +2,9 @@ import { forwardRef } from 'react' import { useTranslation } from 'react-i18next' -import { Plus } from '@/app/components/base/icons/src/vender/line/general' +import { + RiAddLine, +} from '@remixicon/react' const CreateAppCard = forwardRef((_, ref) => { const { t } = useTranslation() @@ -11,7 +13,7 @@ const CreateAppCard = forwardRef((_, ref) => {
- +
{t('dataset.createDataset')}
diff --git a/web/app/(commonLayout)/list.module.css b/web/app/(commonLayout)/list.module.css index 3e34f11ea5f2b8..bb2aa8606c38d8 100644 --- a/web/app/(commonLayout)/list.module.css +++ b/web/app/(commonLayout)/list.module.css @@ -85,13 +85,13 @@ background-image: url("./apps/assets/add.svg"); } -.newItemIconChat { +/* .newItemIconChat { background-image: url("~@/app/components/base/icons/assets/public/header-nav/studio/Robot.svg"); } .selected .newItemIconChat { background-image: url("~@/app/components/base/icons/assets/public/header-nav/studio/Robot-Active.svg"); -} +} */ .newItemIconComplete { background-image: url("./apps/assets/completion.svg"); diff --git a/web/app/(shareLayout)/webapp-signin/page.tsx b/web/app/(shareLayout)/webapp-signin/page.tsx index abfdeaa6e31ada..ebb83884d6c2e9 100644 --- a/web/app/(shareLayout)/webapp-signin/page.tsx +++ b/web/app/(shareLayout)/webapp-signin/page.tsx @@ -140,7 +140,7 @@ const WebSSOForm: FC = () => { variant='primary' onClick={() => { handleSSOLogin() }} disabled={isLoading} - className="w-full !fone-medium !text-sm" + className="w-full !text-sm" >{t('login.sso')} diff --git a/web/app/activate/activateForm.tsx b/web/app/activate/activateForm.tsx index 543e0de0cdac85..9004b5f404c549 100644 --- a/web/app/activate/activateForm.tsx +++ b/web/app/activate/activateForm.tsx @@ -109,7 +109,7 @@ const ActivateForm = () => {

{t('login.invalid')}

@@ -196,7 +196,7 @@ const ActivateForm = () => {
diff --git a/web/app/components/app-sidebar/app-info.tsx b/web/app/components/app-sidebar/app-info.tsx index 2d18e8e0c2e4a1..89d5a93dbec35f 100644 --- a/web/app/components/app-sidebar/app-info.tsx +++ b/web/app/components/app-sidebar/app-info.tsx @@ -2,6 +2,7 @@ import { useTranslation } from 'react-i18next' import { useRouter } from 'next/navigation' import { useContext, useContextSelector } from 'use-context-selector' import cn from 'classnames' +import { RiArrowDownSLine } from '@remixicon/react' import React, { useCallback, useState } from 'react' import AppIcon from '../base/app-icon' import SwitchAppModal from '../app/switch-app-modal' @@ -11,7 +12,6 @@ import { PortalToFollowElemContent, PortalToFollowElemTrigger, } from '@/app/components/base/portal-to-follow-elem' -import { ChevronDown } from '@/app/components/base/icons/src/vender/line/arrows' import Divider from '@/app/components/base/divider' import Confirm from '@/app/components/base/confirm' import { useStore as useAppStore } from '@/app/components/app/store' @@ -190,7 +190,7 @@ const AppInfo = ({ expand }: IAppInfoProps) => {
{appDetail.name}
- {isCurrentWorkspaceEditor && } + {isCurrentWorkspaceEditor && }
{appDetail.mode === 'advanced-chat' && ( diff --git a/web/app/components/app/annotation/add-annotation-modal/index.tsx b/web/app/components/app/annotation/add-annotation-modal/index.tsx index 261593c00dda96..402431afb79be8 100644 --- a/web/app/components/app/annotation/add-annotation-modal/index.tsx +++ b/web/app/components/app/annotation/add-annotation-modal/index.tsx @@ -104,8 +104,8 @@ const AddAnnotationModal: FC = ({
{t('appAnnotation.addModal.createNext')}
- - + +
diff --git a/web/app/components/app/annotation/batch-add-annotation-modal/csv-uploader.tsx b/web/app/components/app/annotation/batch-add-annotation-modal/csv-uploader.tsx index 67c69aacc3d72d..ed84d0e05cddcc 100644 --- a/web/app/components/app/annotation/batch-add-annotation-modal/csv-uploader.tsx +++ b/web/app/components/app/annotation/batch-add-annotation-modal/csv-uploader.tsx @@ -4,9 +4,9 @@ import React, { useEffect, useRef, useState } from 'react' import cn from 'classnames' import { useTranslation } from 'react-i18next' import { useContext } from 'use-context-selector' +import { RiDeleteBinLine } from '@remixicon/react' import { Csv as CSVIcon } from '@/app/components/base/icons/src/public/files' import { ToastContext } from '@/app/components/base/toast' -import { Trash03 } from '@/app/components/base/icons/src/vender/line/general' import Button from '@/app/components/base/button' export type Props = { @@ -113,7 +113,7 @@ const CSVUploader: FC = ({
- +
diff --git a/web/app/components/app/annotation/batch-add-annotation-modal/index.tsx b/web/app/components/app/annotation/batch-add-annotation-modal/index.tsx index db5aab5251861d..8295df6e4dfd91 100644 --- a/web/app/components/app/annotation/batch-add-annotation-modal/index.tsx +++ b/web/app/components/app/annotation/batch-add-annotation-modal/index.tsx @@ -2,11 +2,11 @@ import type { FC } from 'react' import React, { useEffect, useState } from 'react' import { useTranslation } from 'react-i18next' +import { RiCloseLine } from '@remixicon/react' import CSVUploader from './csv-uploader' import CSVDownloader from './csv-downloader' import Button from '@/app/components/base/button' import Modal from '@/app/components/base/modal' -import { XClose } from '@/app/components/base/icons/src/vender/line/general' import Toast from '@/app/components/base/toast' import { annotationBatchImport, checkAnnotationBatchImportProgress } from '@/service/annotation' import { useProviderContext } from '@/context/provider-context' @@ -90,7 +90,7 @@ const BatchModal: FC = ({ { }} className='px-8 py-6 !max-w-[520px] !rounded-xl'>
{t('appAnnotation.batchModal.title')}
- +
= ({ }} >
- +
{t('common.operation.delete')}
@@ -119,8 +120,8 @@ const EditItem: FC = ({ autoFocus />
- - + +
)} diff --git a/web/app/components/app/annotation/header-opts/index.tsx b/web/app/components/app/annotation/header-opts/index.tsx index 9573b052f3f74e..6268df65f04e5c 100644 --- a/web/app/components/app/annotation/header-opts/index.tsx +++ b/web/app/components/app/annotation/header-opts/index.tsx @@ -3,13 +3,15 @@ import type { FC } from 'react' import React, { Fragment, useEffect, useState } from 'react' import { useTranslation } from 'react-i18next' import cn from 'classnames' +import { + RiAddLine, +} from '@remixicon/react' import { useContext } from 'use-context-selector' import { useCSVDownloader, } from 'react-papaparse' import { Menu, Transition } from '@headlessui/react' import Button from '../../../base/button' -import { Plus } from '../../../base/icons/src/vender/line/general' import AddAnnotationModal from '../add-annotation-modal' import type { AnnotationItemBasic } from '../type' import BatchAddModal from '../batch-add-annotation-modal' @@ -136,7 +138,7 @@ const HeaderOptions: FC = ({ return (
= ({ const fetchAnnotationConfig = async () => { const res = await doFetchAnnotationConfig(appDetail.id) setAnnotationConfig(res as AnnotationReplyConfig) + return (res as AnnotationReplyConfig).id } useEffect(() => { const isChatApp = appDetail.mode !== 'completion' @@ -284,9 +285,9 @@ const Annotation: FC = ({ const { job_id: jobId }: any = await updateAnnotationStatus(appDetail.id, AnnotationEnableStatus.enable, embeddingModel, score) await ensureJobCompleted(jobId, AnnotationEnableStatus.enable) } - + const annotationId = await fetchAnnotationConfig() if (score !== annotationConfig?.score_threshold) - await updateAnnotationScore(appDetail.id, annotationConfig?.id || '', score) + await updateAnnotationScore(appDetail.id, annotationId, score) await fetchAnnotationConfig() Toast.notify({ diff --git a/web/app/components/app/annotation/list.tsx b/web/app/components/app/annotation/list.tsx index 33c3b6f125a07f..e6993fa5cb8aa3 100644 --- a/web/app/components/app/annotation/list.tsx +++ b/web/app/components/app/annotation/list.tsx @@ -3,7 +3,8 @@ import type { FC } from 'react' import React from 'react' import { useTranslation } from 'react-i18next' import cn from 'classnames' -import { Edit02, Trash03 } from '../../base/icons/src/vender/line/general' +import { RiDeleteBinLine } from '@remixicon/react' +import { Edit02 } from '../../base/icons/src/vender/line/general' import s from './style.module.css' import type { AnnotationItem } from './type' import RemoveAnnotationConfirmModal from './remove-annotation-confirm-modal' @@ -77,7 +78,7 @@ const List: FC = ({ setShowConfirmDelete(true) }} > - +
diff --git a/web/app/components/app/app-publisher/index.tsx b/web/app/components/app/app-publisher/index.tsx index 29b556705b3a0d..c330b4c270ce43 100644 --- a/web/app/components/app/app-publisher/index.tsx +++ b/web/app/components/app/app-publisher/index.tsx @@ -5,7 +5,7 @@ import { } from 'react' import { useTranslation } from 'react-i18next' import dayjs from 'dayjs' -import classNames from 'classnames' +import { RiArrowDownSLine } from '@remixicon/react' import type { ModelAndParameter } from '../configuration/debug/types' import SuggestedAction from './suggested-action' import PublishWithMultipleModel from './publish-with-multiple-model' @@ -18,7 +18,6 @@ import { import EmbeddedModal from '@/app/components/app/overview/embedded' import { useStore as useAppStore } from '@/app/components/app/store' import { useGetLanguage } from '@/context/i18n' -import { ChevronDown } from '@/app/components/base/icons/src/vender/line/arrows' import { PlayCircle } from '@/app/components/base/icons/src/vender/line/mediaAndDevices' import { CodeBrowser } from '@/app/components/base/icons/src/vender/line/development' import { LeftIndent02 } from '@/app/components/base/icons/src/vender/line/editor' @@ -120,13 +119,11 @@ const AppPublisher = ({ @@ -143,9 +140,10 @@ const AppPublisher = ({ diff --git a/web/app/components/app/chat/citation/index.tsx b/web/app/components/app/chat/citation/index.tsx index 4d9087edb31ceb..4bed9638d367a8 100644 --- a/web/app/components/app/chat/citation/index.tsx +++ b/web/app/components/app/chat/citation/index.tsx @@ -1,9 +1,9 @@ import { useEffect, useMemo, useRef, useState } from 'react' import type { FC } from 'react' import { useTranslation } from 'react-i18next' +import { RiArrowDownSLine } from '@remixicon/react' import type { CitationItem } from '../type' import Popup from './popup' -import { ChevronDown } from '@/app/components/base/icons/src/vender/line/arrows' export type Resources = { documentId: string @@ -112,7 +112,7 @@ const Citation: FC = ({ { !showMore ? 
`+ ${resourcesLength - limitNumberInOneLine}` - : + : } ) diff --git a/web/app/components/app/chat/thought/tool.tsx b/web/app/components/app/chat/thought/tool.tsx index 91423578288b1c..707bc2d5e4bc83 100644 --- a/web/app/components/app/chat/thought/tool.tsx +++ b/web/app/components/app/chat/thought/tool.tsx @@ -4,10 +4,12 @@ import React, { useState } from 'react' import { useTranslation } from 'react-i18next' import cn from 'classnames' +import { + RiArrowDownSLine, + RiLoader2Line, +} from '@remixicon/react' import type { ToolInfoInThought } from '../type' import Panel from './panel' -import { Loading02 } from '@/app/components/base/icons/src/vender/line/general' -import { ChevronDown } from '@/app/components/base/icons/src/vender/line/arrows' import { CheckCircle } from '@/app/components/base/icons/src/vender/solid/general' import { DataSet as DataSetIcon } from '@/app/components/base/icons/src/public/thought' import type { Emoji } from '@/app/components/tools/types' @@ -62,7 +64,7 @@ const Tool: FC = ({ onClick={() => setIsShowDetail(!isShowDetail)} > {!isFinished && ( - + )} {isFinished && !isShowDetail && ( @@ -79,7 +81,7 @@ const Tool: FC = ({ > {toolLabel} - diff --git a/web/app/components/app/configuration/base/warning-mask/cannot-query-dataset.tsx b/web/app/components/app/configuration/base/warning-mask/cannot-query-dataset.tsx index ff9b7eaee77116..52756189a10432 100644 --- a/web/app/components/app/configuration/base/warning-mask/cannot-query-dataset.tsx +++ b/web/app/components/app/configuration/base/warning-mask/cannot-query-dataset.tsx @@ -20,7 +20,7 @@ const FormattingChanged: FC = ({ description={t('appDebug.feature.dataSet.queryVariable.unableToQueryDataSetTip')} footer={
-
diff --git a/web/app/components/app/configuration/base/warning-mask/formatting-changed.tsx b/web/app/components/app/configuration/base/warning-mask/formatting-changed.tsx index 50e05310c838d3..35c2283d154c38 100644 --- a/web/app/components/app/configuration/base/warning-mask/formatting-changed.tsx +++ b/web/app/components/app/configuration/base/warning-mask/formatting-changed.tsx @@ -28,7 +28,7 @@ const FormattingChanged: FC = ({ description={t('appDebug.formattingChangedText')} footer={
- diff --git a/web/app/components/app/configuration/base/warning-mask/has-not-set-api.tsx b/web/app/components/app/configuration/base/warning-mask/has-not-set-api.tsx index 67ed4250cf336b..d83f9d920f9f03 100644 --- a/web/app/components/app/configuration/base/warning-mask/has-not-set-api.tsx +++ b/web/app/components/app/configuration/base/warning-mask/has-not-set-api.tsx @@ -28,7 +28,7 @@ const HasNotSetAPI: FC = ({ title={isTrailFinished ? t('appDebug.notSetAPIKey.trailFinished') : t('appDebug.notSetAPIKey.title')} description={t('appDebug.notSetAPIKey.description')} footer={ - } diff --git a/web/app/components/app/configuration/config-prompt/advanced-prompt-input.tsx b/web/app/components/app/configuration/config-prompt/advanced-prompt-input.tsx index fe382565d5589d..00f47328a49c20 100644 --- a/web/app/components/app/configuration/config-prompt/advanced-prompt-input.tsx +++ b/web/app/components/app/configuration/config-prompt/advanced-prompt-input.tsx @@ -7,19 +7,25 @@ import { useTranslation } from 'react-i18next' import { useContext } from 'use-context-selector' import { useBoolean } from 'ahooks' import produce from 'immer' +import { + RiDeleteBinLine, + RiErrorWarningFill, + RiQuestionLine, +} from '@remixicon/react' import s from './style.module.css' import MessageTypeSelector from './message-type-selector' import ConfirmAddVar from './confirm-add-var' import PromptEditorHeightResizeWrap from './prompt-editor-height-resize-wrap' import type { PromptRole, PromptVariable } from '@/models/debug' -import { HelpCircle, Trash03 } from '@/app/components/base/icons/src/vender/line/general' -import { Clipboard, ClipboardCheck } from '@/app/components/base/icons/src/vender/line/files' +import { + Clipboard, + ClipboardCheck, +} from '@/app/components/base/icons/src/vender/line/files' import Tooltip from '@/app/components/base/tooltip' import PromptEditor from '@/app/components/base/prompt-editor' import ConfigContext from '@/context/debug-configuration' import { getNewVar, getVars } from '@/utils/var' import { AppType } from '@/types/app' -import { AlertCircle } from '@/app/components/base/icons/src/vender/solid/alertsAndFeedback' import { useModalContext } from '@/context/modal-context' import type { ExternalDataTool } from '@/models/common' import { useToastContext } from '@/app/components/base/toast' @@ -140,7 +146,7 @@ const AdvancedPromptInput: FC = ({ }} >
- +
{t('appDebug.promptMode.contextMissing')}
= ({ {t('appDebug.promptTip')}
} selector='config-prompt-tooltip'> - +
)}
{canDelete && ( - + )} {!isCopied ? ( diff --git a/web/app/components/app/configuration/config-prompt/confirm-add-var/index.tsx b/web/app/components/app/configuration/config-prompt/confirm-add-var/index.tsx index 38974c23ddbc8c..bfe51379655c0d 100644 --- a/web/app/components/app/configuration/config-prompt/confirm-add-var/index.tsx +++ b/web/app/components/app/configuration/config-prompt/confirm-add-var/index.tsx @@ -62,8 +62,8 @@ const ConfirmAddVar: FC = ({
- - + +
diff --git a/web/app/components/app/configuration/config-prompt/index.tsx b/web/app/components/app/configuration/config-prompt/index.tsx index ce2c270472c634..bea4a9e455ed81 100644 --- a/web/app/components/app/configuration/config-prompt/index.tsx +++ b/web/app/components/app/configuration/config-prompt/index.tsx @@ -3,6 +3,9 @@ import type { FC } from 'react' import React from 'react' import { useContext } from 'use-context-selector' import produce from 'immer' +import { + RiAddLine, +} from '@remixicon/react' import { useTranslation } from 'react-i18next' import SimplePromptInput from './simple-prompt-input' import AdvancedMessageInput from '@/app/components/app/configuration/config-prompt/advanced-prompt-input' @@ -10,7 +13,6 @@ import { PromptRole } from '@/models/debug' import type { PromptItem, PromptVariable } from '@/models/debug' import { type AppType, ModelModeType } from '@/types/app' import ConfigContext from '@/context/debug-configuration' -import { Plus } from '@/app/components/base/icons/src/vender/line/general' import { MAX_PROMPT_MESSAGE_LENGTH } from '@/config' export type IPromptProps = { mode: AppType @@ -142,7 +144,7 @@ const Prompt: FC = ({
- +
{t('appDebug.promptMode.operation.addMessage')}
)} diff --git a/web/app/components/app/configuration/config-prompt/simple-prompt-input.tsx b/web/app/components/app/configuration/config-prompt/simple-prompt-input.tsx index c75bbf327ecc27..83e835afc042e3 100644 --- a/web/app/components/app/configuration/config-prompt/simple-prompt-input.tsx +++ b/web/app/components/app/configuration/config-prompt/simple-prompt-input.tsx @@ -4,6 +4,9 @@ import React, { useState } from 'react' import { useTranslation } from 'react-i18next' import { useBoolean } from 'ahooks' import cn from 'classnames' +import { + RiQuestionLine, +} from '@remixicon/react' import produce from 'immer' import { useContext } from 'use-context-selector' import ConfirmAddVar from './confirm-add-var' @@ -13,7 +16,6 @@ import { type PromptVariable } from '@/models/debug' import Tooltip from '@/app/components/base/tooltip' import { AppType } from '@/types/app' import { getNewVar, getVars } from '@/utils/var' -import { HelpCircle } from '@/app/components/base/icons/src/vender/line/general' import AutomaticBtn from '@/app/components/app/configuration/config/automatic/automatic-btn' import type { AutomaticRes } from '@/service/debug' import GetAutomaticResModal from '@/app/components/app/configuration/config/automatic/get-automatic-res' @@ -143,7 +145,7 @@ const Prompt: FC = ({ {t('appDebug.promptTip')} } selector='config-prompt-tooltip'> - + )} diff --git a/web/app/components/app/configuration/config-var/index.tsx b/web/app/components/app/configuration/config-var/index.tsx index 45bb1d05c75490..a4f8b6839fff6e 100644 --- a/web/app/components/app/configuration/config-var/index.tsx +++ b/web/app/components/app/configuration/config-var/index.tsx @@ -6,6 +6,10 @@ import { useBoolean } from 'ahooks' import type { Timeout } from 'ahooks/lib/useRequest/src/types' import { useContext } from 'use-context-selector' import produce from 'immer' +import { + RiDeleteBinLine, + RiQuestionLine, +} from '@remixicon/react' import Panel from '../base/feature-panel' import EditModal from './config-modal' import IconTypeIcon from './input-type-icon' @@ -19,7 +23,7 @@ import { DEFAULT_VALUE_MAX_LEN, getMaxVarNameLength } from '@/config' import { checkKeys, getNewVar } from '@/utils/var' import Switch from '@/app/components/base/switch' import Toast from '@/app/components/base/toast' -import { HelpCircle, Settings01, Trash03 } from '@/app/components/base/icons/src/vender/line/general' +import { Settings01 } from '@/app/components/base/icons/src/vender/line/general' import ConfirmModal from '@/app/components/base/confirm/common' import ConfigContext from '@/context/debug-configuration' import { AppType } from '@/types/app' @@ -281,7 +285,7 @@ const ConfigVar: FC = ({ promptVariables, readonly, onPromptVar {t('appDebug.variableTip')} } selector='config-var-tooltip'> - + )} @@ -358,7 +362,7 @@ const ConfigVar: FC = ({ promptVariables, readonly, onPromptVar
handleRemoveVar(index)} > - +
diff --git a/web/app/components/app/configuration/config-vision/index.tsx b/web/app/components/app/configuration/config-vision/index.tsx index 536717e952cfe2..9b12e059b57cfb 100644 --- a/web/app/components/app/configuration/config-vision/index.tsx +++ b/web/app/components/app/configuration/config-vision/index.tsx @@ -2,10 +2,12 @@ import type { FC } from 'react' import React from 'react' import { useTranslation } from 'react-i18next' +import { + RiQuestionLine, +} from '@remixicon/react' import { useContext } from 'use-context-selector' import Panel from '../base/feature-panel' import ParamConfig from './param-config' -import { HelpCircle } from '@/app/components/base/icons/src/vender/line/general' import Tooltip from '@/app/components/base/tooltip' import Switch from '@/app/components/base/switch' import { Eye } from '@/app/components/base/icons/src/vender/solid/general' @@ -34,7 +36,7 @@ const ConfigVision: FC = () => { {t('appDebug.vision.description')} } selector='config-vision-tooltip'> - + } diff --git a/web/app/components/app/configuration/config-vision/param-config-content.tsx b/web/app/components/app/configuration/config-vision/param-config-content.tsx index aca0b6b48c4df5..89fad411e70f47 100644 --- a/web/app/components/app/configuration/config-vision/param-config-content.tsx +++ b/web/app/components/app/configuration/config-vision/param-config-content.tsx @@ -3,12 +3,14 @@ import type { FC } from 'react' import React from 'react' import { useContext } from 'use-context-selector' import { useTranslation } from 'react-i18next' +import { + RiQuestionLine, +} from '@remixicon/react' import RadioGroup from './radio-group' import ConfigContext from '@/context/debug-configuration' import { Resolution, TransferMethod } from '@/types/app' import ParamItem from '@/app/components/base/param-item' import Tooltip from '@/app/components/base/tooltip' -import { HelpCircle } from '@/app/components/base/icons/src/vender/line/general' const MIN = 1 const MAX = 6 @@ -40,7 +42,7 @@ const ParamConfigContent: FC = () => {
{item}
))} } selector='config-resolution-tooltip'> - + { const { t } = useTranslation() @@ -46,7 +48,7 @@ const VoiceParamConfig: FC = () => {
{item}
))} } selector='config-resolution-tooltip'> - + = ({ return ( <> - diff --git a/web/app/components/app/configuration/config/agent/agent-setting/index.tsx b/web/app/components/app/configuration/config/agent/agent-setting/index.tsx index 2aad4421f6d31d..b295a4e709bac9 100644 --- a/web/app/components/app/configuration/config/agent/agent-setting/index.tsx +++ b/web/app/components/app/configuration/config/agent/agent-setting/index.tsx @@ -2,9 +2,9 @@ import type { FC } from 'react' import React, { useState } from 'react' import { useTranslation } from 'react-i18next' +import { RiCloseLine } from '@remixicon/react' import ItemPanel from './item-panel' import Button from '@/app/components/base/button' -import { XClose } from '@/app/components/base/icons/src/vender/line/general' import { CuteRobote } from '@/app/components/base/icons/src/vender/solid/communication' import { Unblur } from '@/app/components/base/icons/src/vender/solid/education' import Slider from '@/app/components/base/slider' @@ -53,7 +53,7 @@ const AgentSetting: FC = ({ onClick={onCancel} className='flex justify-center items-center w-6 h-6 cursor-pointer' > - + @@ -137,13 +137,12 @@ const AgentSetting: FC = ({ > - + ) diff --git a/web/app/components/app/configuration/dataset-config/card-item/item.tsx b/web/app/components/app/configuration/dataset-config/card-item/item.tsx index bc72b7d2998d8b..2b3f04d2d34fc2 100644 --- a/web/app/components/app/configuration/dataset-config/card-item/item.tsx +++ b/web/app/components/app/configuration/dataset-config/card-item/item.tsx @@ -2,12 +2,13 @@ import type { FC } from 'react' import React, { useState } from 'react' import { useTranslation } from 'react-i18next' +import { RiDeleteBinLine } from '@remixicon/react' import SettingsModal from '../settings-modal' import type { DataSet } from '@/models/datasets' import { DataSourceType } from '@/models/datasets' import { formatNumber } from '@/utils/format' import FileIcon from '@/app/components/base/file-icon' -import { Settings01, Trash03 } from '@/app/components/base/icons/src/vender/line/general' +import { Settings01 } from '@/app/components/base/icons/src/vender/line/general' import { Folder } from '@/app/components/base/icons/src/vender/solid/files' import Drawer from '@/app/components/base/drawer' import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints' @@ -77,7 +78,7 @@ const Item: FC = ({ className='group/action flex items-center justify-center w-6 h-6 hover:bg-[#FEE4E2] rounded-md cursor-pointer' onClick={() => onRemove(config.id)} > - + setShowSettingsModal(false)} footer={null} mask={isMobile} panelClassname='mt-16 mx-2 sm:mr-2 mb-3 !p-0 !max-w-[640px] rounded-xl'> diff --git a/web/app/components/app/configuration/dataset-config/context-var/index.tsx b/web/app/components/app/configuration/dataset-config/context-var/index.tsx index c38176d9b6bbbd..d320adcc77cb24 100644 --- a/web/app/components/app/configuration/dataset-config/context-var/index.tsx +++ b/web/app/components/app/configuration/dataset-config/context-var/index.tsx @@ -3,11 +3,13 @@ import type { FC } from 'react' import React from 'react' import { useTranslation } from 'react-i18next' import cn from 'classnames' +import { + RiQuestionLine, +} from '@remixicon/react' import type { Props } from './var-picker' import VarPicker from './var-picker' import { BracketsX } from '@/app/components/base/icons/src/vender/line/development' import Tooltip from '@/app/components/base/tooltip' -import { HelpCircle } from '@/app/components/base/icons/src/vender/line/general' const 
ContextVar: FC = (props) => { const { t } = useTranslation() @@ -27,7 +29,7 @@ const ContextVar: FC = (props) => { } selector='context-var-tooltip' > - + diff --git a/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx b/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx index b795c928a51123..640c78d2028931 100644 --- a/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx +++ b/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx @@ -2,6 +2,9 @@ import React from 'react' import type { FC } from 'react' import { useTranslation } from 'react-i18next' +import { + RiQuestionLine, +} from '@remixicon/react' import TopKItem from '@/app/components/base/param-item/top-k-item' import ScoreThresholdItem from '@/app/components/base/param-item/score-threshold-item' import RadioCard from '@/app/components/base/radio-card/simple' @@ -19,7 +22,6 @@ import { useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/com import type { ModelConfig } from '@/app/components/workflow/types' import ModelParameterModal from '@/app/components/header/account-setting/model-provider-page/model-parameter-modal' import TooltipPlus from '@/app/components/base/tooltip-plus' -import { HelpCircle } from '@/app/components/base/icons/src/vender/line/general' import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations' type Props = { @@ -155,7 +157,7 @@ const ConfigContent: FC = ({ - + = ({ {selected.length > 0 && `${selected.length} ${t('appDebug.feature.dataSet.selected')}`}
- - + +
)} diff --git a/web/app/components/app/configuration/dataset-config/settings-modal/index.tsx b/web/app/components/app/configuration/dataset-config/settings-modal/index.tsx index d4c3bcb0138579..266d3dce9415c5 100644 --- a/web/app/components/app/configuration/dataset-config/settings-modal/index.tsx +++ b/web/app/components/app/configuration/dataset-config/settings-modal/index.tsx @@ -3,6 +3,7 @@ import { useRef, useState } from 'react' import { useTranslation } from 'react-i18next' import { isEqual } from 'lodash-es' import cn from 'classnames' +import { RiCloseLine } from '@remixicon/react' import { BookOpenIcon } from '@heroicons/react/24/outline' import IndexMethodRadio from '@/app/components/datasets/settings/index-method-radio' import Button from '@/app/components/base/button' @@ -10,7 +11,6 @@ import type { DataSet } from '@/models/datasets' import { useToastContext } from '@/app/components/base/toast' import { updateDatasetSetting } from '@/service/datasets' import { useModalContext } from '@/context/modal-context' -import { XClose } from '@/app/components/base/icons/src/vender/line/general' import type { RetrievalConfig } from '@/types/app' import RetrievalMethodConfig from '@/app/components/datasets/common/retrieval-method-config' import EconomicalRetrievalMethodConfig from '@/app/components/datasets/common/economical-retrieval-method-config' @@ -136,7 +136,7 @@ const SettingsModal: FC = ({ onClick={onCancel} className='flex justify-center items-center w-6 h-6 cursor-pointer' > - + @@ -262,7 +262,7 @@ const SettingsModal: FC = ({ e.stopPropagation() e.nativeEvent.stopImmediatePropagation() }}> - + )} @@ -275,13 +275,12 @@ const SettingsModal: FC = ({ >
@@ -413,7 +412,7 @@ const Debug: FC = ({ : null } {mode !== AppType.completion && ( - + +
) : ( @@ -192,7 +207,7 @@ const OpeningStatement: FC = ({ setTempSuggestedQuestions(tempSuggestedQuestions.filter((_, i) => index !== i)) }} > - + ) @@ -201,7 +216,7 @@ const OpeningStatement: FC = ({
{ setTempSuggestedQuestions([...tempSuggestedQuestions, '']) }} className='mt-1 flex items-center h-9 px-3 gap-2 rounded-lg cursor-pointer text-gray-400 bg-gray-100 hover:bg-gray-200'> - +
{t('appDebug.variableConig.addOption')}
)} diff --git a/web/app/components/app/configuration/features/chat-group/suggested-questions-after-answer/index.tsx b/web/app/components/app/configuration/features/chat-group/suggested-questions-after-answer/index.tsx index 533d2494874f7c..e27eec46c8b683 100644 --- a/web/app/components/app/configuration/features/chat-group/suggested-questions-after-answer/index.tsx +++ b/web/app/components/app/configuration/features/chat-group/suggested-questions-after-answer/index.tsx @@ -2,10 +2,12 @@ import type { FC } from 'react' import React from 'react' import { useTranslation } from 'react-i18next' +import { + RiQuestionLine, +} from '@remixicon/react' import Panel from '@/app/components/app/configuration/base/feature-panel' import SuggestedQuestionsAfterAnswerIcon from '@/app/components/app/configuration/base/icons/suggested-questions-after-answer-icon' import Tooltip from '@/app/components/base/tooltip' -import { HelpCircle } from '@/app/components/base/icons/src/vender/line/general' const SuggestedQuestionsAfterAnswer: FC = () => { const { t } = useTranslation() @@ -18,7 +20,7 @@ const SuggestedQuestionsAfterAnswer: FC = () => { {t('appDebug.feature.suggestedQuestionsAfterAnswer.description')} } selector='suggestion-question-tooltip'> - + } diff --git a/web/app/components/app/configuration/prompt-value-panel/index.tsx b/web/app/components/app/configuration/prompt-value-panel/index.tsx index 52409b4b5f7ed7..0192024c83e527 100644 --- a/web/app/components/app/configuration/prompt-value-panel/index.tsx +++ b/web/app/components/app/configuration/prompt-value-panel/index.tsx @@ -3,6 +3,10 @@ import type { FC } from 'react' import React, { useState } from 'react' import { useTranslation } from 'react-i18next' import { useContext } from 'use-context-selector' +import { + RiArrowDownSLine, + RiArrowRightLine, +} from '@remixicon/react' import { PlayIcon, } from '@heroicons/react/24/solid' @@ -12,7 +16,6 @@ import { AppType, ModelModeType } from '@/types/app' import Select from '@/app/components/base/select' import { DEFAULT_VALUE_MAX_LEN } from '@/config' import Button from '@/app/components/base/button' -import { ChevronDown, ChevronRight } from '@/app/components/base/icons/src/vender/line/arrows' import Tooltip from '@/app/components/base/tooltip-plus' import TextGenerationImageUploader from '@/app/components/base/image-uploader/text-generation-image-uploader' import type { VisionFile, VisionSettings } from '@/types/app' @@ -102,8 +105,8 @@ const PromptValuePanel: FC = ({
setUserInputFieldCollapse(!userInputFieldCollapse)}> { userInputFieldCollapse - ? - : + ? + : }
{t('appDebug.inputs.userInputField')}
@@ -195,7 +198,6 @@ const PromptValuePanel: FC = ({
- +
{children}
@@ -95,6 +99,7 @@ const AnnotationReplyConfig: FC = ({ setIsShowEdit(false) }} onSave={async (embeddingModel, score) => { + const annotationConfig = await fetchAnnotationConfig(appId) as AnnotationReplyConfigType let isEmbeddingModelChanged = false if ( embeddingModel.embedding_model_name !== annotationConfig.embedding_model.embedding_model_name diff --git a/web/app/components/app/configuration/toolbox/moderation/moderation-setting-modal.tsx b/web/app/components/app/configuration/toolbox/moderation/moderation-setting-modal.tsx index 66d0a0ef68cb46..64b2dd222a816f 100644 --- a/web/app/components/app/configuration/toolbox/moderation/moderation-setting-modal.tsx +++ b/web/app/components/app/configuration/toolbox/moderation/moderation-setting-modal.tsx @@ -354,13 +354,12 @@ const ModerationSettingModal: FC = ({
@@ -106,7 +108,7 @@ const Tools = () => { {t('appDebug.feature.tools.title')} {t('appDebug.feature.tools.tips')}}> - + { @@ -121,7 +123,7 @@ const Tools = () => { className='flex items-center h-7 px-3 text-xs font-medium text-gray-700 cursor-pointer' onClick={() => handleOpenExternalDataToolModal({}, -1)} > - + {t('common.operation.add')} @@ -165,7 +167,7 @@ const Tools = () => { className='hidden group/action group-hover:flex items-center justify-center w-6 h-6 hover:bg-[#FEE4E2] rounded-md cursor-pointer' onClick={() => setExternalDataToolsConfig([...externalDataToolsConfig.slice(0, index), ...externalDataToolsConfig.slice(index + 1)])} > - +
- +
) diff --git a/web/app/components/app/create-app-modal/index.tsx b/web/app/components/app/create-app-modal/index.tsx index 05a473b8e54e34..11e265e9ad606f 100644 --- a/web/app/components/app/create-app-modal/index.tsx +++ b/web/app/components/app/create-app-modal/index.tsx @@ -3,6 +3,10 @@ import type { MouseEventHandler } from 'react' import { useCallback, useRef, useState } from 'react' import { useTranslation } from 'react-i18next' import cn from 'classnames' +import { + RiCloseLine, + RiQuestionLine, +} from '@remixicon/react' import { useRouter } from 'next/navigation' import { useContext, useContextSelector } from 'use-context-selector' import s from './style.module.css' @@ -17,7 +21,6 @@ import AppIcon from '@/app/components/base/app-icon' import EmojiPicker from '@/app/components/base/emoji-picker' import AppsFull from '@/app/components/billing/apps-full-in-dialog' import { AiText, ChatBot, CuteRobote } from '@/app/components/base/icons/src/vender/solid/communication' -import { HelpCircle, XClose } from '@/app/components/base/icons/src/vender/line/general' import { Route } from '@/app/components/base/icons/src/vender/solid/mapsAndTravel' import TooltipPlus from '@/app/components/base/tooltip-plus' import { NEED_REFRESH_APP_LIST_KEY } from '@/config' @@ -202,7 +205,7 @@ const CreateAppModal = ({ show, onSuccess, onClose }: CreateAppDialogProps) => {
{t('app.newApp.basic')}
- +
{ BETA
- +
{
{t('app.newApp.captionDescription')}