diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml
index 8173bee58e8e24..7c632f8a34d56a 100644
--- a/.github/workflows/api-tests.yml
+++ b/.github/workflows/api-tests.yml
@@ -76,7 +76,7 @@ jobs:
- name: Run Workflow
run: poetry run -C api bash dev/pytest/pytest_workflow.sh
- - name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale)
+ - name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, Elasticsearch)
uses: hoverkraft-tech/compose-action@v2.0.0
with:
compose-file: |
@@ -90,5 +90,6 @@ jobs:
pgvecto-rs
pgvector
chroma
+ elasticsearch
- name: Test Vector Stores
run: poetry run -C api bash dev/pytest/pytest_vdb.sh
diff --git a/.github/workflows/expose_service_ports.sh b/.github/workflows/expose_service_ports.sh
index 3418bf0c6f6688..ae3e0ee69d8cfb 100755
--- a/.github/workflows/expose_service_ports.sh
+++ b/.github/workflows/expose_service_ports.sh
@@ -6,5 +6,6 @@ yq eval '.services.chroma.ports += ["8000:8000"]' -i docker/docker-compose.yaml
yq eval '.services["milvus-standalone"].ports += ["19530:19530"]' -i docker/docker-compose.yaml
yq eval '.services.pgvector.ports += ["5433:5432"]' -i docker/docker-compose.yaml
yq eval '.services["pgvecto-rs"].ports += ["5431:5432"]' -i docker/docker-compose.yaml
+yq eval '.services["elasticsearch"].ports += ["9200:9200"]' -i docker/docker-compose.yaml
-echo "Ports exposed for sandbox, weaviate, qdrant, chroma, milvus, pgvector, pgvecto-rs."
\ No newline at end of file
+echo "Ports exposed for sandbox, weaviate, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch."
\ No newline at end of file
diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml
index f6092c86337d85..d681dc66276dd1 100644
--- a/.github/workflows/style.yml
+++ b/.github/workflows/style.yml
@@ -45,6 +45,10 @@ jobs:
if: steps.changed-files.outputs.any_changed == 'true'
run: poetry run -C api dotenv-linter ./api/.env.example ./web/.env.example
+ - name: Ruff formatter check
+ if: steps.changed-files.outputs.any_changed == 'true'
+ run: poetry run -C api ruff format --check ./api
+
- name: Lint hints
if: failure()
run: echo "Please run 'dev/reformat' to fix the fixable linting errors."
diff --git a/.gitignore b/.gitignore
index c52b9d8bbf5bca..22aba6536479e8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -178,3 +178,4 @@ pyrightconfig.json
api/.vscode
.idea/
+.vscode
\ No newline at end of file
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index f810584f24115c..8f57cd545ee308 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -8,7 +8,7 @@ In terms of licensing, please take a minute to read our short [License and Contr
## Before you jump in
-[Find](https://github.com/langgenius/dify/issues?q=is:issue+is:closed) an existing issue, or [open](https://github.com/langgenius/dify/issues/new/choose) a new one. We categorize issues into 2 types:
+[Find](https://github.com/langgenius/dify/issues?q=is:issue+is:open) an existing issue, or [open](https://github.com/langgenius/dify/issues/new/choose) a new one. We categorize issues into 2 types:
### Feature requests:
diff --git a/CONTRIBUTING_CN.md b/CONTRIBUTING_CN.md
index 303c2513f53b9b..7cd2bb60ebca04 100644
--- a/CONTRIBUTING_CN.md
+++ b/CONTRIBUTING_CN.md
@@ -8,7 +8,7 @@
## 在开始之前
-[查找](https://github.com/langgenius/dify/issues?q=is:issue+is:closed)现有问题,或 [创建](https://github.com/langgenius/dify/issues/new/choose) 一个新问题。我们将问题分为两类:
+[查找](https://github.com/langgenius/dify/issues?q=is:issue+is:open)现有问题,或 [创建](https://github.com/langgenius/dify/issues/new/choose) 一个新问题。我们将问题分为两类:
### 功能请求:
diff --git a/CONTRIBUTING_JA.md b/CONTRIBUTING_JA.md
index 6d5bfb205c31dc..a68bdeddbc830f 100644
--- a/CONTRIBUTING_JA.md
+++ b/CONTRIBUTING_JA.md
@@ -10,7 +10,7 @@ Dify にコントリビュートしたいとお考えなのですね。それは
## 飛び込む前に
-[既存の Issue](https://github.com/langgenius/dify/issues?q=is:issue+is:closed) を探すか、[新しい Issue](https://github.com/langgenius/dify/issues/new/choose) を作成してください。私たちは Issue を 2 つのタイプに分類しています。
+[既存の Issue](https://github.com/langgenius/dify/issues?q=is:issue+is:open) を探すか、[新しい Issue](https://github.com/langgenius/dify/issues/new/choose) を作成してください。私たちは Issue を 2 つのタイプに分類しています。
### 機能リクエスト
diff --git a/CONTRIBUTING_VI.md b/CONTRIBUTING_VI.md
new file mode 100644
index 00000000000000..80e68a046ec5fc
--- /dev/null
+++ b/CONTRIBUTING_VI.md
@@ -0,0 +1,156 @@
+Thật tuyệt vời khi bạn muốn đóng góp cho Dify! Chúng tôi rất mong chờ được thấy những gì bạn sẽ làm. Là một startup với nguồn nhân lực và tài chính hạn chế, chúng tôi có tham vọng lớn là thiết kế quy trình trực quan nhất để xây dựng và quản lý các ứng dụng LLM. Mọi sự giúp đỡ từ cộng đồng đều rất quý giá đối với chúng tôi.
+
+Chúng tôi cần linh hoạt và làm việc nhanh chóng, nhưng đồng thời cũng muốn đảm bảo các cộng tác viên như bạn có trải nghiệm đóng góp thuận lợi nhất có thể. Chúng tôi đã tạo ra hướng dẫn đóng góp này nhằm giúp bạn làm quen với codebase và cách chúng tôi làm việc với các cộng tác viên, để bạn có thể nhanh chóng bắt tay vào phần thú vị.
+
+Hướng dẫn này, cũng như bản thân Dify, đang trong quá trình cải tiến liên tục. Chúng tôi rất cảm kích sự thông cảm của bạn nếu đôi khi nó không theo kịp dự án thực tế, và chúng tôi luôn hoan nghênh mọi phản hồi để cải thiện.
+
+Về vấn đề cấp phép, xin vui lòng dành chút thời gian đọc qua [Thỏa thuận Cấp phép và Đóng góp](./LICENSE) ngắn gọn của chúng tôi. Cộng đồng cũng tuân thủ [quy tắc ứng xử](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).
+
+## Trước khi bắt đầu
+
+[Tìm kiếm](https://github.com/langgenius/dify/issues?q=is:issue+is:open) một vấn đề hiện có, hoặc [tạo mới](https://github.com/langgenius/dify/issues/new/choose) một vấn đề. Chúng tôi phân loại các vấn đề thành 2 loại:
+
+### Yêu cầu tính năng:
+
+* Nếu bạn đang tạo một yêu cầu tính năng mới, chúng tôi muốn bạn giải thích tính năng đề xuất sẽ đạt được điều gì và cung cấp càng nhiều thông tin chi tiết càng tốt. [@perzeusss](https://github.com/perzeuss) đã tạo một [Trợ lý Yêu cầu Tính năng](https://udify.app/chat/MK2kVSnw1gakVwMX) rất hữu ích để giúp bạn soạn thảo nhu cầu của mình. Hãy thử dùng nó nhé.
+
+* Nếu bạn muốn chọn một vấn đề từ danh sách hiện có, chỉ cần để lại bình luận dưới vấn đề đó nói rằng bạn sẽ làm.
+
+ Một thành viên trong nhóm làm việc trong lĩnh vực liên quan sẽ được thông báo. Nếu mọi thứ ổn, họ sẽ cho phép bạn bắt đầu code. Chúng tôi yêu cầu bạn chờ đợi cho đến lúc đó trước khi bắt tay vào làm tính năng, để không lãng phí công sức của bạn nếu chúng tôi đề xuất thay đổi.
+
+ Tùy thuộc vào lĩnh vực mà tính năng đề xuất thuộc về, bạn có thể nói chuyện với các thành viên khác nhau trong nhóm. Dưới đây là danh sách các lĩnh vực mà các thành viên trong nhóm chúng tôi đang làm việc hiện tại:
+
+ | Thành viên | Phạm vi |
+ | ------------------------------------------------------------ | ---------------------------------------------------- |
+ | [@yeuoly](https://github.com/Yeuoly) | Thiết kế kiến trúc Agents |
+ | [@jyong](https://github.com/JohnJyong) | Thiết kế quy trình RAG |
+ | [@GarfieldDai](https://github.com/GarfieldDai) | Xây dựng quy trình làm việc |
+ | [@iamjoel](https://github.com/iamjoel) & [@zxhlyh](https://github.com/zxhlyh) | Làm cho giao diện người dùng dễ sử dụng |
+ | [@guchenhe](https://github.com/guchenhe) & [@crazywoola](https://github.com/crazywoola) | Trải nghiệm nhà phát triển, đầu mối liên hệ cho mọi vấn đề |
+ | [@takatost](https://github.com/takatost) | Định hướng và kiến trúc tổng thể sản phẩm |
+
+ Cách chúng tôi ưu tiên:
+
+ | Loại tính năng | Mức độ ưu tiên |
+ | ------------------------------------------------------------ | -------------- |
+ | Tính năng ưu tiên cao được gắn nhãn bởi thành viên trong nhóm | Ưu tiên cao |
+ | Yêu cầu tính năng phổ biến từ [bảng phản hồi cộng đồng](https://github.com/langgenius/dify/discussions/categories/feedbacks) của chúng tôi | Ưu tiên trung bình |
+ | Tính năng không quan trọng và cải tiến nhỏ | Ưu tiên thấp |
+ | Có giá trị nhưng không cấp bách | Tính năng tương lai |
+
+### Những vấn đề khác (ví dụ: báo cáo lỗi, tối ưu hiệu suất, sửa lỗi chính tả):
+
+* Bắt đầu code ngay lập tức.
+
+ Cách chúng tôi ưu tiên:
+
+ | Loại vấn đề | Mức độ ưu tiên |
+ | ------------------------------------------------------------ | -------------- |
+ | Lỗi trong các chức năng chính (không thể đăng nhập, ứng dụng không hoạt động, lỗ hổng bảo mật) | Nghiêm trọng |
+ | Lỗi không quan trọng, cải thiện hiệu suất | Ưu tiên trung bình |
+ | Sửa lỗi nhỏ (lỗi chính tả, giao diện người dùng gây nhầm lẫn nhưng vẫn hoạt động) | Ưu tiên thấp |
+
+
+## Cài đặt
+
+Dưới đây là các bước để thiết lập Dify cho việc phát triển:
+
+### 1. Fork repository này
+
+### 2. Clone repository
+
+ Clone repository đã fork từ terminal của bạn:
+
+```
+git clone git@github.com:<github_username>/dify.git
+```
+
+### 3. Kiểm tra các phụ thuộc
+
+Dify yêu cầu các phụ thuộc sau để build, hãy đảm bảo chúng đã được cài đặt trên hệ thống của bạn:
+
+- [Docker](https://www.docker.com/)
+- [Docker Compose](https://docs.docker.com/compose/install/)
+- [Node.js v18.x (LTS)](http://nodejs.org)
+- [npm](https://www.npmjs.com/) phiên bản 8.x.x hoặc [Yarn](https://yarnpkg.com/)
+- [Python](https://www.python.org/) phiên bản 3.10.x
+
+### 4. Cài đặt
+
+Dify bao gồm một backend và một frontend. Đi đến thư mục backend bằng lệnh `cd api/`, sau đó làm theo hướng dẫn trong [README của Backend](api/README.md) để cài đặt. Trong một terminal khác, đi đến thư mục frontend bằng lệnh `cd web/`, sau đó làm theo hướng dẫn trong [README của Frontend](web/README.md) để cài đặt.
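+
+Một phác thảo tối giản cho hai terminal (chỉ mang tính minh họa — các bước chính xác nằm trong hai README nói trên):
+
+```bash
+# Terminal 1 – backend (giả định các bước chuẩn trong api/README.md)
+cd api
+cp .env.example .env
+poetry install
+poetry run python -m flask db upgrade
+poetry run python -m flask run --host 0.0.0.0 --port=5001 --debug
+
+# Terminal 2 – frontend (giả định các bước chuẩn trong web/README.md)
+cd web
+npm install
+npm run dev
+```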
+
+Kiểm tra [FAQ về cài đặt](https://docs.dify.ai/learn-more/faq/self-host-faq) để xem danh sách các vấn đề thường gặp và các bước khắc phục.
+
+### 5. Truy cập Dify trong trình duyệt của bạn
+
+Để xác nhận cài đặt của bạn, hãy truy cập [http://localhost:3000](http://localhost:3000) (địa chỉ mặc định, hoặc URL và cổng bạn đã cấu hình) trong trình duyệt. Bạn sẽ thấy Dify đang chạy.
+
+## Phát triển
+
+Nếu bạn đang thêm một nhà cung cấp mô hình, [hướng dẫn này](https://github.com/langgenius/dify/blob/main/api/core/model_runtime/README.md) dành cho bạn.
+
+Nếu bạn đang thêm một nhà cung cấp công cụ cho Agent hoặc Workflow, [hướng dẫn này](./api/core/tools/README.md) dành cho bạn.
+
+Để giúp bạn nhanh chóng định hướng phần đóng góp của mình, dưới đây là một bản phác thảo ngắn gọn về cấu trúc backend & frontend của Dify:
+
+### Backend
+
+Backend của Dify được viết bằng Python sử dụng [Flask](https://flask.palletsprojects.com/en/3.0.x/). Nó sử dụng [SQLAlchemy](https://www.sqlalchemy.org/) cho ORM và [Celery](https://docs.celeryq.dev/en/stable/getting-started/introduction.html) cho hàng đợi tác vụ. Logic xác thực được thực hiện thông qua Flask-login.
+
+```
+[api/]
+├── constants // Các cài đặt hằng số được sử dụng trong toàn bộ codebase.
+├── controllers // Định nghĩa các route API và logic xử lý yêu cầu.
+├── core // Điều phối ứng dụng cốt lõi, tích hợp mô hình và công cụ.
+├── docker // Cấu hình liên quan đến Docker & containerization.
+├── events // Xử lý sự kiện
+├── extensions // Mở rộng với các framework/nền tảng bên thứ 3.
+├── fields // Định nghĩa trường cho serialization/marshalling.
+├── libs // Thư viện và tiện ích có thể tái sử dụng.
+├── migrations // Script cho việc di chuyển cơ sở dữ liệu.
+├── models // Mô hình cơ sở dữ liệu & định nghĩa schema.
+├── services // Xác định logic nghiệp vụ.
+├── storage // Lưu trữ khóa riêng tư.
+├── tasks // Xử lý các tác vụ bất đồng bộ và công việc nền.
+└── tests
+```
+
+### Frontend
+
+Website được khởi tạo trên boilerplate [Next.js](https://nextjs.org/) bằng Typescript và sử dụng [Tailwind CSS](https://tailwindcss.com/) cho styling. [React-i18next](https://react.i18next.com/) được sử dụng cho việc quốc tế hóa.
+
+```
+[web/]
+├── app // layouts, pages và components
+│ ├── (commonLayout) // layout chung được sử dụng trong toàn bộ ứng dụng
+│ ├── (shareLayout) // layouts được chia sẻ cụ thể cho các phiên dựa trên token
+│ ├── activate // trang kích hoạt
+│ ├── components // được chia sẻ bởi các trang và layouts
+│ ├── install // trang cài đặt
+│ ├── signin // trang đăng nhập
+│ └── styles // styles được chia sẻ toàn cục
+├── assets // Tài nguyên tĩnh
+├── bin // scripts chạy ở bước build
+├── config // cài đặt và tùy chọn có thể điều chỉnh
+├── context // contexts được chia sẻ bởi các phần khác nhau của ứng dụng
+├── dictionaries // File dịch cho từng ngôn ngữ
+├── docker // cấu hình container
+├── hooks // Hooks có thể tái sử dụng
+├── i18n // Cấu hình quốc tế hóa
+├── models // mô tả các mô hình dữ liệu & hình dạng của phản hồi API
+├── public // tài nguyên meta như favicon
+├── service // xác định hình dạng của các hành động API
+├── test
+├── types // mô tả các tham số hàm và giá trị trả về
+└── utils // Các hàm tiện ích được chia sẻ
+```
+
+## Gửi PR của bạn
+
+Cuối cùng, đã đến lúc mở một pull request (PR) đến repository của chúng tôi. Đối với các tính năng lớn, chúng tôi sẽ merge chúng vào nhánh `deploy/dev` để kiểm tra trước khi đưa vào nhánh `main`. Nếu bạn gặp vấn đề như xung đột merge hoặc không biết cách mở pull request, hãy xem [hướng dẫn về pull request của GitHub](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests).
+
+Và thế là xong! Khi PR của bạn được merge, bạn sẽ được giới thiệu là một người đóng góp trong [README](https://github.com/langgenius/dify/blob/main/README.md) của chúng tôi.
+
+## Nhận trợ giúp
+
+Nếu bạn gặp khó khăn hoặc có câu hỏi cấp bách trong quá trình đóng góp, hãy đặt câu hỏi của bạn trong vấn đề GitHub liên quan, hoặc tham gia [Discord](https://discord.gg/8Tpq4AcN9c) của chúng tôi để trò chuyện nhanh chóng.
\ No newline at end of file
diff --git a/README.md b/README.md
index 0dd0d862549afc..1c49c415fe09a9 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,7 @@
Dify Cloud ·
Self-hosting ·
Documentation ·
- Enterprise inquiry
+ Enterprise inquiry
@@ -38,6 +38,7 @@
+
@@ -65,7 +66,7 @@ Dify is an open-source LLM app development platform. Its intuitive interface com
Extensive RAG capabilities that cover everything from document ingestion to retrieval, with out-of-box support for text extraction from PDFs, PPTs, and other common document formats.
**5. Agent capabilities**:
- You can define agents based on LLM Function Calling or ReAct, and add pre-built or custom tools for the agent. Dify provides 50+ built-in tools for AI agents, such as Google Search, DELL·E, Stable Diffusion and WolframAlpha.
+ You can define agents based on LLM Function Calling or ReAct, and add pre-built or custom tools for the agent. Dify provides 50+ built-in tools for AI agents, such as Google Search, DALL·E, Stable Diffusion and WolframAlpha.
**6. LLMOps**:
Monitor and analyze application logs and performance over time. You could continuously improve prompts, datasets, and models based on production data and annotations.
@@ -151,7 +152,7 @@ Quickly get Dify running in your environment with this [starter guide](#quick-st
Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions.
- **Dify for enterprise / organizations**
-We provide additional enterprise-centric features. [Schedule a meeting with us](https://cal.com/guchenhe/30min) or [send us an email](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs.
+We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs.
> For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one-click. It's an affordable AMI offering with the option to create apps with custom logo and branding.
@@ -220,23 +221,6 @@ At the same time, please consider supporting Dify by sharing it on social media
* [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
* [Twitter](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
-Or, schedule a meeting directly with a team member:
-
-
-
- Point of Contact |
- Purpose |
-
-
- |
- Business enquiries & product feedback |
-
-
- |
- Contributions, issues & feature requests |
-
-
-
## Star history
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
diff --git a/README_AR.md b/README_AR.md
index 25229ef460dd24..10d572cc49a83b 100644
--- a/README_AR.md
+++ b/README_AR.md
@@ -4,7 +4,7 @@
Dify Cloud ·
الاستضافة الذاتية ·
التوثيق ·
- استفسارات الشركات
+ استفسار الشركات (للإنجليزية فقط)
@@ -38,6 +38,7 @@
+
@@ -57,7 +58,7 @@
**4. خط أنابيب RAG**: قدرات RAG الواسعة التي تغطي كل شيء من استيعاب الوثائق إلى الاسترجاع، مع الدعم الفوري لاستخراج النص من ملفات PDF و PPT وتنسيقات الوثائق الشائعة الأخرى.
-**5. قدرات الوكيل**: يمكنك تعريف الوكلاء بناءً على أمر وظيفة LLM أو ReAct، وإضافة أدوات مدمجة أو مخصصة للوكيل. توفر Dify أكثر من 50 أداة مدمجة لوكلاء الذكاء الاصطناعي، مثل البحث في Google و DELL·E وStable Diffusion و WolframAlpha.
+**5. قدرات الوكيل**: يمكنك تعريف الوكلاء بناءً على أمر وظيفة LLM أو ReAct، وإضافة أدوات مدمجة أو مخصصة للوكيل. توفر Dify أكثر من 50 أداة مدمجة لوكلاء الذكاء الاصطناعي، مثل البحث في Google و DALL·E وStable Diffusion و WolframAlpha.
**6. الـ LLMOps**: راقب وتحلل سجلات التطبيق والأداء على مر الزمن. يمكنك تحسين الأوامر والبيانات والنماذج باستمرار استنادًا إلى البيانات الإنتاجية والتعليقات.
@@ -203,23 +204,6 @@ docker compose up -d
* [Discord](https://discord.gg/FngNHpbcY7). الأفضل لـ: مشاركة تطبيقاتك والترفيه مع المجتمع.
* [تويتر](https://twitter.com/dify_ai). الأفضل لـ: مشاركة تطبيقاتك والترفيه مع المجتمع.
-أو، قم بجدولة اجتماع مباشرة مع أحد أعضاء الفريق:
-
-
-
- نقطة الاتصال |
- الغرض |
-
-
- |
- استفسارات الأعمال واقتراحات حول المنتج |
-
-
- |
- المساهمات والمشكلات وطلبات الميزات |
-
-
-
## تاريخ النجمة
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
diff --git a/README_CN.md b/README_CN.md
index ebbbe2590209dc..32551fcc313932 100644
--- a/README_CN.md
+++ b/README_CN.md
@@ -4,7 +4,7 @@
Dify 云服务 ·
自托管 ·
文档 ·
-
预约演示
+
(需用英文)常见问题解答 / 联系团队
@@ -29,14 +29,16 @@
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
@@ -70,7 +72,7 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI
广泛的 RAG 功能,涵盖从文档摄入到检索的所有内容,支持从 PDF、PPT 和其他常见文档格式中提取文本的开箱即用的支持。
**5. Agent 智能体**:
- 您可以基于 LLM 函数调用或 ReAct 定义 Agent,并为 Agent 添加预构建或自定义工具。Dify 为 AI Agent 提供了50多种内置工具,如谷歌搜索、DELL·E、Stable Diffusion 和 WolframAlpha 等。
+ 您可以基于 LLM 函数调用或 ReAct 定义 Agent,并为 Agent 添加预构建或自定义工具。Dify 为 AI Agent 提供了50多种内置工具,如谷歌搜索、DALL·E、Stable Diffusion 和 WolframAlpha 等。
**6. LLMOps**:
随时间监视和分析应用程序日志和性能。您可以根据生产数据和标注持续改进提示、数据集和模型。
@@ -156,7 +158,7 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI
使用我们的[文档](https://docs.dify.ai)进行进一步的参考和更深入的说明。
- **面向企业/组织的 Dify**
-我们提供额外的面向企业的功能。[与我们安排会议](https://cal.com/guchenhe/30min)或[给我们发送电子邮件](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)讨论企业需求。
+我们提供额外的面向企业的功能。[给我们发送电子邮件](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)讨论企业需求。
> 对于使用 AWS 的初创公司和中小型企业,请查看 [AWS Marketplace 上的 Dify 高级版](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6),并使用一键部署到您自己的 AWS VPC。它是一个价格实惠的 AMI 产品,提供了使用自定义徽标和品牌创建应用程序的选项。
## 保持领先
diff --git a/README_ES.md b/README_ES.md
index 52b78da47d0f40..2ae044b32883b9 100644
--- a/README_ES.md
+++ b/README_ES.md
@@ -4,7 +4,7 @@
Dify Cloud ·
Auto-alojamiento ·
Documentación ·
- Programar demostración
+ Consultas empresariales (en inglés)
@@ -29,14 +29,16 @@
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
#
@@ -70,7 +72,7 @@ Dify es una plataforma de desarrollo de aplicaciones de LLM de código abierto.
**5. Capacidades de agente**:
Puedes definir agent
-es basados en LLM Function Calling o ReAct, y agregar herramientas preconstruidas o personalizadas para el agente. Dify proporciona más de 50 herramientas integradas para agentes de IA, como Búsqueda de Google, DELL·E, Difusión Estable y WolframAlpha.
+es basados en LLM Function Calling o ReAct, y agregar herramientas preconstruidas o personalizadas para el agente. Dify proporciona más de 50 herramientas integradas para agentes de IA, como Búsqueda de Google, DALL·E, Difusión Estable y WolframAlpha.
**6. LLMOps**:
Supervisa y analiza registros de aplicaciones y rendimiento a lo largo del tiempo. Podrías mejorar continuamente prompts, conjuntos de datos y modelos basados en datos de producción y anotaciones.
@@ -156,7 +158,7 @@ Pon rápidamente Dify en funcionamiento en tu entorno con esta [guía de inicio
Usa nuestra [documentación](https://docs.dify.ai) para más referencias e instrucciones más detalladas.
- **Dify para Empresas / Organizaciones**
-Proporcionamos características adicionales centradas en la empresa. [Programa una reunión con nosotros](https://cal.com/guchenhe/30min) o [envíanos un correo electrónico](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) para discutir las necesidades empresariales.
+Proporcionamos características adicionales centradas en la empresa. [Envíanos un correo electrónico](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) para discutir las necesidades empresariales.
> Para startups y pequeñas empresas que utilizan AWS, echa un vistazo a [Dify Premium en AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) e impleméntalo en tu propio VPC de AWS con un clic. Es una AMI asequible que ofrece la opción de crear aplicaciones con logotipo y marca personalizados.
@@ -228,23 +230,6 @@ Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en
* [Discord](https://discord.gg/FngNHpbcY7). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad.
* [Twitter](https://twitter.com/dify_ai). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad.
-O, programa una reunión directamente con un miembro del equipo:
-
-
-
- Punto de Contacto |
- Propósito |
-
-
- |
- Consultas comerciales y retroalimentación del producto |
-
-
- |
- Contribuciones, problemas y solicitudes de características |
-
-
-
## Historial de Estrellas
[![Gráfico de Historial de Estrellas](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
@@ -256,4 +241,4 @@ Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. En
## Licencia
-Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales.
\ No newline at end of file
+Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales.
diff --git a/README_FR.md b/README_FR.md
index 17a08812848443..681d596749c9e7 100644
--- a/README_FR.md
+++ b/README_FR.md
@@ -4,7 +4,7 @@
Dify Cloud ·
Auto-hébergement ·
Documentation ·
- Planifier une démo
+ Demande d’entreprise (en anglais seulement)
@@ -29,14 +29,16 @@
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
#
@@ -70,7 +72,7 @@ Dify est une plateforme de développement d'applications LLM open source. Son in
**5. Capac
ités d'agent**:
- Vous pouvez définir des agents basés sur l'appel de fonction LLM ou ReAct, et ajouter des outils pré-construits ou personnalisés pour l'agent. Dify fournit plus de 50 outils intégrés pour les agents d'IA, tels que la recherche Google, DELL·E, Stable Diffusion et WolframAlpha.
+ Vous pouvez définir des agents basés sur l'appel de fonction LLM ou ReAct, et ajouter des outils pré-construits ou personnalisés pour l'agent. Dify fournit plus de 50 outils intégrés pour les agents d'IA, tels que la recherche Google, DALL·E, Stable Diffusion et WolframAlpha.
**6. LLMOps**:
Surveillez et analysez les journaux d'application et les performances au fil du temps. Vous pouvez continuellement améliorer les prompts, les ensembles de données et les modèles en fonction des données de production et des annotations.
@@ -156,7 +158,7 @@ Lancez rapidement Dify dans votre environnement avec ce [guide de démarrage](#q
Utilisez notre [documentation](https://docs.dify.ai) pour plus de références et des instructions plus détaillées.
- **Dify pour les entreprises / organisations**
-Nous proposons des fonctionnalités supplémentaires adaptées aux entreprises. [Planifiez une réunion avec nous](https://cal.com/guchenhe/30min) ou [envoyez-nous un e-mail](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) pour discuter des besoins de l'entreprise.
+Nous proposons des fonctionnalités supplémentaires adaptées aux entreprises. [Envoyez-nous un e-mail](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) pour discuter des besoins de l'entreprise.
> Pour les startups et les petites entreprises utilisant AWS, consultez [Dify Premium sur AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) et déployez-le dans votre propre VPC AWS en un clic. C'est une offre AMI abordable avec la possibilité de créer des applications avec un logo et une marque personnalisés.
@@ -226,23 +228,6 @@ Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur le
* [Discord](https://discord.gg/FngNHpbcY7). Meilleur pour: partager vos applications et passer du temps avec la communauté.
* [Twitter](https://twitter.com/dify_ai). Meilleur pour: partager vos applications et passer du temps avec la communauté.
-Ou, planifiez directement une réunion avec un membre de l'équipe:
-
-
-
- Point de contact |
- Objectif |
-
-
- |
- Demandes commerciales & retours produit |
-
-
- |
- Contributions, problèmes & demandes de fonctionnalités |
-
-
-
## Historique des étoiles
[![Graphique de l'historique des étoiles](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
diff --git a/README_JA.md b/README_JA.md
index 5828379a74cf0f..e6a8621e7baae5 100644
--- a/README_JA.md
+++ b/README_JA.md
@@ -4,7 +4,7 @@
Dify Cloud ·
セルフホスティング ·
ドキュメント ·
- デモの予約
+ 企業のお問い合わせ(英語のみ)
@@ -29,14 +29,16 @@
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
#
@@ -69,7 +71,7 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ
ドキュメントの取り込みから検索までをカバーする広範なRAG機能ができます。ほかにもPDF、PPT、その他の一般的なドキュメントフォーマットからのテキスト抽出のサーポイントも提供します。
**5. エージェント機能**:
- LLM Function CallingやReActに基づくエージェントの定義が可能で、AIエージェント用のプリビルトまたはカスタムツールを追加できます。Difyには、Google検索、DELL·E、Stable Diffusion、WolframAlphaなどのAIエージェント用の50以上の組み込みツールが提供します。
+ LLM Function CallingやReActに基づくエージェントの定義が可能で、AIエージェント用のプリビルトまたはカスタムツールを追加できます。Difyには、Google検索、DALL·E、Stable Diffusion、WolframAlphaなどのAIエージェント用の50以上の組み込みツールが提供します。
**6. LLMOps**:
アプリケーションのログやパフォーマンスを監視と分析し、生産のデータと注釈に基づいて、プロンプト、データセット、モデルを継続的に改善できます。
@@ -155,7 +157,7 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ
詳しくは[ドキュメント](https://docs.dify.ai)をご覧ください。
- **企業/組織向けのDify**
-企業中心の機能を提供しています。[こちらからミーティングを予約](https://cal.com/guchenhe/30min)したり、[メールを送信](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)して企業のニーズについて相談してください。
+企業中心の機能を提供しています。[メールを送信](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)して企業のニーズについて相談してください。
> AWSを使用しているスタートアップ企業や中小企業の場合は、[AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6)のDify Premiumをチェックして、ワンクリックで自分のAWS VPCにデプロイできます。さらに、手頃な価格のAMIオファリングどして、ロゴやブランディングをカスタマイズしてアプリケーションを作成するオプションがあります。
@@ -225,28 +227,6 @@ docker compose up -d
* [Discord](https://discord.gg/FngNHpbcY7). 主に: アプリケーションの共有やコミュニティとの交流。
* [Twitter](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。
-または、直接チームメンバーとミーティングをスケジュール:
-
-
-
- 連絡先 |
- 目的 |
-
-
- ミーティング |
- 無料の30分間のミーティングをスケジュール |
-
-
- 技術サポート |
- 技術的な問題やサポートに関する質問 |
-
-
- 営業担当 |
- 法人ライセンスに関するお問い合わせ |
-
-
## ライセンス
diff --git a/README_KL.md b/README_KL.md
index 64d2d2485886fb..04620d42bbec8a 100644
--- a/README_KL.md
+++ b/README_KL.md
@@ -4,7 +4,7 @@
Dify Cloud ·
Self-hosting ·
Documentation ·
- Schedule demo
+ Commercial enquiries
@@ -29,14 +29,16 @@
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
#
@@ -68,7 +70,7 @@ Dify is an open-source LLM app development platform. Its intuitive interface com
Extensive RAG capabilities that cover everything from document ingestion to retrieval, with out-of-box support for text extraction from PDFs, PPTs, and other common document formats.
**5. Agent capabilities**:
- You can define agents based on LLM Function Calling or ReAct, and add pre-built or custom tools for the agent. Dify provides 50+ built-in tools for AI agents, such as Google Search, DELL·E, Stable Diffusion and WolframAlpha.
+ You can define agents based on LLM Function Calling or ReAct, and add pre-built or custom tools for the agent. Dify provides 50+ built-in tools for AI agents, such as Google Search, DALL·E, Stable Diffusion and WolframAlpha.
**6. LLMOps**:
Monitor and analyze application logs and performance over time. You could continuously improve prompts, datasets, and models based on production data and annotations.
@@ -156,7 +158,7 @@ Quickly get Dify running in your environment with this [starter guide](#quick-st
Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions.
- **Dify for Enterprise / Organizations**
-We provide additional enterprise-centric features. [Schedule a meeting with us](https://cal.com/guchenhe/30min) or [send us an email](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs.
+We provide additional enterprise-centric features. [Send us an email](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs.
> For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one-click. It's an affordable AMI offering with the option to create apps with custom logo and branding.
@@ -228,23 +230,6 @@ At the same time, please consider supporting Dify by sharing it on social media
* [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
* [Twitter](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
-Or, schedule a meeting directly with a team member:
-
-
-
- Point of Contact |
- Purpose |
-
-
- |
- Business enquiries & product feedback |
-
-
- |
- Contributions, issues & feature requests |
-
-
-
## Star History
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
@@ -256,4 +241,4 @@ To protect your privacy, please avoid posting security issues on GitHub. Instead
## License
-This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions.
\ No newline at end of file
+This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions.
diff --git a/README_KR.md b/README_KR.md
index 2d7db3df4cac67..a5f3bc68d04d74 100644
--- a/README_KR.md
+++ b/README_KR.md
@@ -4,7 +4,7 @@
Dify 클라우드 ·
셀프-호스팅 ·
문서 ·
- 기업 문의
+ 기업 문의 (영어만 가능)
@@ -35,8 +35,10 @@
-
+
+
+
@@ -64,7 +66,7 @@
문서 수집부터 검색까지 모든 것을 다루며, PDF, PPT 및 기타 일반적인 문서 형식에서 텍스트 추출을 위한 기본 지원이 포함되어 있는 광범위한 RAG 기능을 제공합니다.
**5. 에이전트 기능**:
- LLM 함수 호출 또는 ReAct를 기반으로 에이전트를 정의하고 에이전트에 대해 사전 구축된 도구나 사용자 정의 도구를 추가할 수 있습니다. Dify는 Google Search, DELL·E, Stable Diffusion, WolframAlpha 등 AI 에이전트를 위한 50개 이상의 내장 도구를 제공합니다.
+ LLM 함수 호출 또는 ReAct를 기반으로 에이전트를 정의하고 에이전트에 대해 사전 구축된 도구나 사용자 정의 도구를 추가할 수 있습니다. Dify는 Google Search, DALL·E, Stable Diffusion, WolframAlpha 등 AI 에이전트를 위한 50개 이상의 내장 도구를 제공합니다.
**6. LLMOps**:
시간 경과에 따른 애플리케이션 로그와 성능을 모니터링하고 분석합니다. 생산 데이터와 주석을 기반으로 프롬프트, 데이터세트, 모델을 지속적으로 개선할 수 있습니다.
@@ -149,7 +151,7 @@
추가 참조 및 더 심층적인 지침은 [문서](https://docs.dify.ai)를 사용하세요.
- **기업 / 조직을 위한 Dify**
- 우리는 추가적인 기업 중심 기능을 제공합니다. 당사와 [미팅일정](https://cal.com/guchenhe/30min)을 잡거나 [이메일 보내기](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)를 통해 기업 요구 사항을 논의하십시오.
+ 우리는 추가적인 기업 중심 기능을 제공합니다. [이메일 보내기](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)를 통해 기업 요구 사항을 논의하십시오.
> AWS를 사용하는 스타트업 및 중소기업의 경우 [AWS Marketplace에서 Dify Premium](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6)을 확인하고 한 번의 클릭으로 자체 AWS VPC에 배포하십시오. 맞춤형 로고와 브랜딩이 포함된 앱을 생성할 수 있는 옵션이 포함된 저렴한 AMI 제품입니다.
@@ -218,22 +220,6 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했
* [디스코드](https://discord.gg/FngNHpbcY7). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다.
* [트위터](https://twitter.com/dify_ai). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다.
-또는 팀원과 직접 미팅을 예약하세요:
-
-
-
- 연락처 |
- 목적 |
-
-
- |
- 비즈니스 문의 및 제품 피드백 |
-
-
- |
- 기여, 이슈 및 기능 요청 |
-
-
## Star 히스토리
diff --git a/README_TR.md b/README_TR.md
index 2ae7d440a81373..54b6db3f823717 100644
--- a/README_TR.md
+++ b/README_TR.md
@@ -4,7 +4,7 @@
Dify Bulut ·
Kendi Sunucunuzda Barındırma ·
Dokümantasyon ·
- Kurumsal Sorgu
+ Yalnızca İngilizce: Kurumsal Sorgulama
@@ -38,6 +38,7 @@
+
@@ -155,7 +156,7 @@ Bu [başlangıç kılavuzu](#quick-start) ile Dify'ı kendi ortamınızda hızl
Daha fazla referans ve detaylı talimatlar için [dokümantasyonumuzu](https://docs.dify.ai) kullanın.
- **Kurumlar / organizasyonlar için Dify**
-Ek kurumsal odaklı özellikler sunuyoruz. Kurumsal ihtiyaçları görüşmek için [bizimle bir toplantı planlayın](https://cal.com/guchenhe/30min) veya [bize bir e-posta gönderin](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry).
+Ek kurumsal odaklı özellikler sunuyoruz. Kurumsal ihtiyaçları görüşmek için [bize bir e-posta gönderin](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry).
> AWS kullanan startuplar ve küçük işletmeler için, [AWS Marketplace'deki Dify Premium'a](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) göz atın ve tek tıklamayla kendi AWS VPC'nize dağıtın. Bu, özel logo ve marka ile uygulamalar oluşturma seçeneğine sahip uygun fiyatlı bir AMI teklifdir.
## Güncel Kalma
@@ -223,23 +224,6 @@ Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda p
* [Discord](https://discord.gg/FngNHpbcY7). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için.
* [Twitter](https://twitter.com/dify_ai). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için.
-Veya doğrudan bir ekip üyesiyle toplantı planlayın:
-
-
-
- İletişim Noktası |
- Amaç |
-
-
- |
- İş sorgulamaları & ürün geri bildirimleri |
-
-
- |
- Katkılar, sorunlar & özellik istekleri |
-
-
-
## Star history
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
diff --git a/README_VI.md b/README_VI.md
new file mode 100644
index 00000000000000..6d4035eceb06de
--- /dev/null
+++ b/README_VI.md
@@ -0,0 +1,234 @@
+![cover-v5-optimized](https://github.com/langgenius/dify/assets/13230914/f9e19af5-61ba-4119-b926-d10c4c06ebab)
+
+
+ Dify Cloud ·
+ Tự triển khai ·
+ Tài liệu ·
+ Yêu cầu doanh nghiệp
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Dify là một nền tảng phát triển ứng dụng LLM mã nguồn mở. Giao diện trực quan kết hợp quy trình làm việc AI, mô hình RAG, khả năng tác nhân, quản lý mô hình, tính năng quan sát và hơn thế nữa, cho phép bạn nhanh chóng chuyển từ nguyên mẫu sang sản phẩm. Đây là danh sách các tính năng cốt lõi:
+
+
+**1. Quy trình làm việc**:
+ Xây dựng và kiểm tra các quy trình làm việc AI mạnh mẽ trên một canvas trực quan, tận dụng tất cả các tính năng sau đây và hơn thế nữa.
+
+
+ https://github.com/langgenius/dify/assets/13230914/356df23e-1604-483d-80a6-9517ece318aa
+
+
+
+**2. Hỗ trợ mô hình toàn diện**:
+ Tích hợp liền mạch với hàng trăm mô hình LLM độc quyền / mã nguồn mở từ hàng chục nhà cung cấp suy luận và giải pháp tự lưu trữ, bao gồm GPT, Mistral, Llama3, và bất kỳ mô hình tương thích API OpenAI nào. Danh sách đầy đủ các nhà cung cấp mô hình được hỗ trợ có thể được tìm thấy [tại đây](https://docs.dify.ai/getting-started/readme/model-providers).
+
+![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)
+
+
+**3. IDE Prompt**:
+ Giao diện trực quan để tạo prompt, so sánh hiệu suất mô hình và thêm các tính năng bổ sung như chuyển văn bản thành giọng nói cho một ứng dụng dựa trên trò chuyện.
+
+**4. Mô hình RAG**:
+ Khả năng RAG mở rộng bao gồm mọi thứ từ nhập tài liệu đến truy xuất, với hỗ trợ sẵn có cho việc trích xuất văn bản từ PDF, PPT và các định dạng tài liệu phổ biến khác.
+
+**5. Khả năng tác nhân**:
+ Bạn có thể định nghĩa các tác nhân dựa trên LLM Function Calling hoặc ReAct, và thêm các công cụ được xây dựng sẵn hoặc tùy chỉnh cho tác nhân. Dify cung cấp hơn 50 công cụ tích hợp sẵn cho các tác nhân AI, như Google Search, DALL·E, Stable Diffusion và WolframAlpha.
+
+**6. LLMOps**:
+ Giám sát và phân tích nhật ký và hiệu suất ứng dụng theo thời gian. Bạn có thể liên tục cải thiện prompt, bộ dữ liệu và mô hình dựa trên dữ liệu sản xuất và chú thích.
+
+**7. Backend-as-a-Service**:
+ Tất cả các dịch vụ của Dify đều đi kèm với các API tương ứng, vì vậy bạn có thể dễ dàng tích hợp Dify vào logic kinh doanh của riêng mình.
+
+
+## So sánh tính năng
+
+| Tính năng | Dify.AI | LangChain | Flowise | OpenAI Assistants API |
+| --- | --- | --- | --- | --- |
+| Phương pháp lập trình | Hướng API + Ứng dụng | Mã Python | Hướng ứng dụng | Hướng API |
+| LLMs được hỗ trợ | Đa dạng phong phú | Đa dạng phong phú | Đa dạng phong phú | Chỉ OpenAI |
+| RAG Engine | ✅ | ✅ | ✅ | ✅ |
+| Agent | ✅ | ✅ | ❌ | ✅ |
+| Quy trình làm việc | ✅ | ❌ | ✅ | ❌ |
+| Khả năng quan sát | ✅ | ✅ | ❌ | ❌ |
+| Tính năng doanh nghiệp (SSO/Kiểm soát truy cập) | ✅ | ❌ | ❌ | ❌ |
+| Triển khai cục bộ | ✅ | ✅ | ✅ | ❌ |
+
+## Sử dụng Dify
+
+- **Cloud**
+Chúng tôi lưu trữ dịch vụ [Dify Cloud](https://dify.ai) cho bất kỳ ai muốn thử mà không cần cài đặt. Nó cung cấp tất cả các khả năng của phiên bản tự triển khai và bao gồm 200 lượt gọi GPT-4 miễn phí trong gói sandbox.
+
+- **Tự triển khai Dify Community Edition**
+Nhanh chóng chạy Dify trong môi trường của bạn với [hướng dẫn bắt đầu](#quick-start) này.
+Sử dụng [tài liệu](https://docs.dify.ai) của chúng tôi để tham khảo thêm và nhận hướng dẫn chi tiết hơn.
+
+- **Dify cho doanh nghiệp / tổ chức**
+Chúng tôi cung cấp các tính năng bổ sung tập trung vào doanh nghiệp. [Ghi lại câu hỏi của bạn cho chúng tôi thông qua chatbot này](https://udify.app/chat/22L1zSxg6yW1cWQg) hoặc [gửi email cho chúng tôi](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) để thảo luận về nhu cầu doanh nghiệp.
+ > Đối với các công ty khởi nghiệp và doanh nghiệp nhỏ sử dụng AWS, hãy xem [Dify Premium trên AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) và triển khai nó vào AWS VPC của riêng bạn chỉ với một cú nhấp chuột. Đây là một AMI giá cả phải chăng với tùy chọn tạo ứng dụng với logo và thương hiệu tùy chỉnh.
+
+
+## Luôn cập nhật
+
+Yêu thích Dify trên GitHub và được thông báo ngay lập tức về các bản phát hành mới.
+
+![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)
+
+
+
+## Bắt đầu nhanh
+> Trước khi cài đặt Dify, hãy đảm bảo máy của bạn đáp ứng các yêu cầu hệ thống tối thiểu sau:
+>
+>- CPU >= 2 Core
+>- RAM >= 4GB
+
+
+
+Cách dễ nhất để khởi động máy chủ Dify là chạy tệp [docker-compose.yml](docker/docker-compose.yaml) của chúng tôi. Trước khi chạy lệnh cài đặt, hãy đảm bảo rằng [Docker](https://docs.docker.com/get-docker/) và [Docker Compose](https://docs.docker.com/compose/install/) đã được cài đặt trên máy của bạn:
+
+```bash
+cd docker
+cp .env.example .env
+docker compose up -d
+```
+
+Sau khi chạy, bạn có thể truy cập bảng điều khiển Dify trong trình duyệt của bạn tại [http://localhost/install](http://localhost/install) và bắt đầu quá trình khởi tạo.
+
+> Nếu bạn muốn đóng góp cho Dify hoặc phát triển thêm, hãy tham khảo [hướng dẫn triển khai từ mã nguồn](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code) của chúng tôi
+
+## Các bước tiếp theo
+
+Nếu bạn cần tùy chỉnh cấu hình, vui lòng tham khảo các nhận xét trong tệp [.env.example](docker/.env.example) của chúng tôi và cập nhật các giá trị tương ứng trong tệp `.env` của bạn. Ngoài ra, bạn có thể cần điều chỉnh tệp `docker-compose.yaml`, chẳng hạn như thay đổi phiên bản hình ảnh, ánh xạ cổng hoặc gắn kết khối lượng, dựa trên môi trường triển khai cụ thể và yêu cầu của bạn. Sau khi thực hiện bất kỳ thay đổi nào, vui lòng chạy lại `docker-compose up -d`. Bạn có thể tìm thấy danh sách đầy đủ các biến môi trường có sẵn [tại đây](https://docs.dify.ai/getting-started/install-self-hosted/environments).
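+
+Ví dụ tối giản (giả định bạn đang ở thư mục gốc của repo):
+
+```bash
+cd docker
+# chỉnh sửa các giá trị cần thay đổi trong .env (tham khảo chú thích trong .env.example)
+vim .env
+# chạy lại để các container được tạo lại với cấu hình mới
+docker compose up -d
+```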
+
+Nếu bạn muốn cấu hình một cài đặt có độ sẵn sàng cao, có các [Helm Charts](https://helm.sh/) và tệp YAML do cộng đồng đóng góp cho phép Dify được triển khai trên Kubernetes.
+
+- [Helm Chart bởi @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
+- [Helm Chart bởi @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
+- [Tệp YAML bởi @Winson-030](https://github.com/Winson-030/dify-kubernetes)
+
+#### Sử dụng Terraform để Triển khai
+
+##### Azure Global
+Triển khai Dify lên Azure chỉ với một cú nhấp chuột bằng cách sử dụng [terraform](https://www.terraform.io/).
+- [Azure Terraform bởi @nikawang](https://github.com/nikawang/dify-azure-terraform)
+
+## Đóng góp
+
+Đối với những người muốn đóng góp mã, xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) của chúng tôi.
+Đồng thời, vui lòng xem xét hỗ trợ Dify bằng cách chia sẻ nó trên mạng xã hội và tại các sự kiện và hội nghị.
+
+
+> Chúng tôi đang tìm kiếm người đóng góp để giúp dịch Dify sang các ngôn ngữ khác ngoài tiếng Trung hoặc tiếng Anh. Nếu bạn quan tâm đến việc giúp đỡ, vui lòng xem [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) để biết thêm thông tin và để lại bình luận cho chúng tôi trong kênh `global-users` của [Máy chủ Cộng đồng Discord](https://discord.gg/8Tpq4AcN9c) của chúng tôi.
+
+**Người đóng góp**
+
+
+
+
+
+## Cộng đồng & liên hệ
+
+* [Thảo luận GitHub](https://github.com/langgenius/dify/discussions). Tốt nhất cho: chia sẻ phản hồi và đặt câu hỏi.
+* [Vấn đề GitHub](https://github.com/langgenius/dify/issues). Tốt nhất cho: lỗi bạn gặp phải khi sử dụng Dify.AI và đề xuất tính năng. Xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) của chúng tôi.
+* [Discord](https://discord.gg/FngNHpbcY7). Tốt nhất cho: chia sẻ ứng dụng của bạn và giao lưu với cộng đồng.
+* [Twitter](https://twitter.com/dify_ai). Tốt nhất cho: chia sẻ ứng dụng của bạn và giao lưu với cộng đồng.
+
+## Lịch sử Yêu thích
+
+[![Biểu đồ Lịch sử Yêu thích](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
+
+## Tiết lộ bảo mật
+
+Để bảo vệ quyền riêng tư của bạn, vui lòng tránh đăng các vấn đề bảo mật trên GitHub. Thay vào đó, hãy gửi câu hỏi của bạn đến security@dify.ai và chúng tôi sẽ cung cấp cho bạn câu trả lời chi tiết hơn.
+
+## Giấy phép
+
+Kho lưu trữ này có sẵn theo [Giấy phép Mã nguồn Mở Dify](LICENSE), về cơ bản là Apache 2.0 với một vài hạn chế bổ sung.
\ No newline at end of file
diff --git a/api/.env.example b/api/.env.example
index cf3a0f302d60cc..e41e2271d52d24 100644
--- a/api/.env.example
+++ b/api/.env.example
@@ -60,7 +60,8 @@ ALIYUN_OSS_SECRET_KEY=your-secret-key
ALIYUN_OSS_ENDPOINT=your-endpoint
ALIYUN_OSS_AUTH_VERSION=v1
ALIYUN_OSS_REGION=your-region
-
+# Don't start with '/'. OSS doesn't support leading slash in object names.
+ALIYUN_OSS_PATH=your-path
# Google Storage configuration
GOOGLE_STORAGE_BUCKET_NAME=yout-bucket-name
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=your-google-service-account-json-base64-string
@@ -130,6 +131,12 @@ TENCENT_VECTOR_DB_DATABASE=dify
TENCENT_VECTOR_DB_SHARD=1
TENCENT_VECTOR_DB_REPLICAS=2
+# Elasticsearch configuration
+ELASTICSEARCH_HOST=127.0.0.1
+ELASTICSEARCH_PORT=9200
+ELASTICSEARCH_USERNAME=elastic
+ELASTICSEARCH_PASSWORD=elastic
+
# PGVECTO_RS configuration
PGVECTO_RS_HOST=localhost
PGVECTO_RS_PORT=5431
@@ -241,8 +248,8 @@ API_TOOL_DEFAULT_READ_TIMEOUT=60
HTTP_REQUEST_MAX_CONNECT_TIMEOUT=300
HTTP_REQUEST_MAX_READ_TIMEOUT=600
HTTP_REQUEST_MAX_WRITE_TIMEOUT=600
-HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760 # 10MB
-HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576 # 1MB
+HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
+HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576
# Log file path
LOG_FILE=
@@ -261,4 +268,13 @@ APP_MAX_ACTIVE_REQUESTS=0
# Celery beat configuration
-CELERY_BEAT_SCHEDULER_TIME=1
\ No newline at end of file
+CELERY_BEAT_SCHEDULER_TIME=1
+
+# Position configuration
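+# Comma-separated provider/tool names used to pin, include, or exclude entries
+# in the tool and model-provider listings; leave a value empty to keep the defaults.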
+POSITION_TOOL_PINS=
+POSITION_TOOL_INCLUDES=
+POSITION_TOOL_EXCLUDES=
+
+POSITION_PROVIDER_PINS=
+POSITION_PROVIDER_INCLUDES=
+POSITION_PROVIDER_EXCLUDES=
diff --git a/.idea/icon.png b/api/.idea/icon.png
similarity index 100%
rename from .idea/icon.png
rename to api/.idea/icon.png
diff --git a/.idea/vcs.xml b/api/.idea/vcs.xml
similarity index 88%
rename from .idea/vcs.xml
rename to api/.idea/vcs.xml
index ae8b1755c52a17..b7af618884ac3b 100644
--- a/.idea/vcs.xml
+++ b/api/.idea/vcs.xml
@@ -12,5 +12,6 @@
+
-
\ No newline at end of file
+
diff --git a/.vscode/launch.json b/api/.vscode/launch.json.example
similarity index 83%
rename from .vscode/launch.json
rename to api/.vscode/launch.json.example
index e4eb6aef932faf..e9f8e42dd5341c 100644
--- a/.vscode/launch.json
+++ b/api/.vscode/launch.json.example
@@ -5,8 +5,8 @@
"name": "Python: Flask",
"type": "debugpy",
"request": "launch",
- "python": "${workspaceFolder}/api/.venv/bin/python",
- "cwd": "${workspaceFolder}/api",
+ "python": "${workspaceFolder}/.venv/bin/python",
+ "cwd": "${workspaceFolder}",
"envFile": ".env",
"module": "flask",
"justMyCode": true,
@@ -18,15 +18,15 @@
"args": [
"run",
"--host=0.0.0.0",
- "--port=5001",
+ "--port=5001"
]
},
{
"name": "Python: Celery",
"type": "debugpy",
"request": "launch",
- "python": "${workspaceFolder}/api/.venv/bin/python",
- "cwd": "${workspaceFolder}/api",
+ "python": "${workspaceFolder}/.venv/bin/python",
+ "cwd": "${workspaceFolder}",
"module": "celery",
"justMyCode": true,
"envFile": ".env",
diff --git a/api/Dockerfile b/api/Dockerfile
index 55776f80e136c8..82b89ad77ba1ac 100644
--- a/api/Dockerfile
+++ b/api/Dockerfile
@@ -5,6 +5,10 @@ WORKDIR /app/api
# Install Poetry
ENV POETRY_VERSION=1.8.3
+
+# If you are located in China, you can use the Aliyun mirror to speed things up
+# RUN pip install --no-cache-dir poetry==${POETRY_VERSION} -i https://mirrors.aliyun.com/pypi/simple/
+
RUN pip install --no-cache-dir poetry==${POETRY_VERSION}
# Configure Poetry
@@ -12,9 +16,13 @@ ENV POETRY_CACHE_DIR=/tmp/poetry_cache
ENV POETRY_NO_INTERACTION=1
ENV POETRY_VIRTUALENVS_IN_PROJECT=true
ENV POETRY_VIRTUALENVS_CREATE=true
+ENV POETRY_REQUESTS_TIMEOUT=15
FROM base AS packages
+# If you are located in China, you can use the Aliyun mirror to speed things up
+# RUN sed -i 's@deb.debian.org@mirrors.aliyun.com@g' /etc/apt/sources.list.d/debian.sources
+
RUN apt-get update \
&& apt-get install -y --no-install-recommends gcc g++ libc-dev libffi-dev libgmp-dev libmpfr-dev libmpc-dev
@@ -41,8 +49,14 @@ ENV TZ=UTC
WORKDIR /app/api
RUN apt-get update \
- && apt-get install -y --no-install-recommends curl wget vim nodejs ffmpeg libgmp-dev libmpfr-dev libmpc-dev \
- && apt-get autoremove \
+ && apt-get install -y --no-install-recommends curl nodejs libgmp-dev libmpfr-dev libmpc-dev \
+ # If you are located in China, you can use the Aliyun mirror to speed things up
+ # && echo "deb http://mirrors.aliyun.com/debian testing main" > /etc/apt/sources.list \
+ && echo "deb http://deb.debian.org/debian testing main" > /etc/apt/sources.list \
+ && apt-get update \
+ # For Security
+ && apt-get install -y --no-install-recommends zlib1g=1:1.3.dfsg+really1.3.1-1 expat=2.6.2-1 libldap-2.5-0=2.5.18+dfsg-3 perl=5.38.2-5 libsqlite3-0=3.46.0-1 \
+ && apt-get autoremove -y \
&& rm -rf /var/lib/apt/lists/*
# Copy Python environment and packages
@@ -50,6 +64,9 @@ ENV VIRTUAL_ENV=/app/api/.venv
COPY --from=packages ${VIRTUAL_ENV} ${VIRTUAL_ENV}
ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
+# Download nltk data
+RUN python -c "import nltk; nltk.download('punkt'); nltk.download('averaged_perceptron_tagger')"
+
# Copy source code
COPY . /app/api/
diff --git a/api/app.py b/api/app.py
index 50441cb81da1f4..ad219ca0d67459 100644
--- a/api/app.py
+++ b/api/app.py
@@ -1,6 +1,6 @@
import os
-if os.environ.get("DEBUG", "false").lower() != 'true':
+if os.environ.get("DEBUG", "false").lower() != "true":
from gevent import monkey
monkey.patch_all()
@@ -57,7 +57,7 @@
if os.name == "nt":
os.system('tzutil /s "UTC"')
else:
- os.environ['TZ'] = 'UTC'
+ os.environ["TZ"] = "UTC"
time.tzset()
@@ -70,13 +70,14 @@ class DifyApp(Flask):
# -------------
-config_type = os.getenv('EDITION', default='SELF_HOSTED') # ce edition first
+config_type = os.getenv("EDITION", default="SELF_HOSTED") # ce edition first
# ----------------------------
# Application Factory Function
# ----------------------------
+
def create_flask_app_with_configs() -> Flask:
"""
create a raw flask app
@@ -92,7 +93,7 @@ def create_flask_app_with_configs() -> Flask:
elif isinstance(value, int | float | bool):
os.environ[key] = str(value)
elif value is None:
- os.environ[key] = ''
+ os.environ[key] = ""
return dify_app
@@ -100,10 +101,10 @@ def create_flask_app_with_configs() -> Flask:
def create_app() -> Flask:
app = create_flask_app_with_configs()
- app.secret_key = app.config['SECRET_KEY']
+ app.secret_key = app.config["SECRET_KEY"]
log_handlers = None
- log_file = app.config.get('LOG_FILE')
+ log_file = app.config.get("LOG_FILE")
if log_file:
log_dir = os.path.dirname(log_file)
os.makedirs(log_dir, exist_ok=True)
@@ -111,23 +112,24 @@ def create_app() -> Flask:
RotatingFileHandler(
filename=log_file,
maxBytes=1024 * 1024 * 1024,
- backupCount=5
+ backupCount=5,
),
- logging.StreamHandler(sys.stdout)
+ logging.StreamHandler(sys.stdout),
]
logging.basicConfig(
- level=app.config.get('LOG_LEVEL'),
- format=app.config.get('LOG_FORMAT'),
- datefmt=app.config.get('LOG_DATEFORMAT'),
+ level=app.config.get("LOG_LEVEL"),
+ format=app.config.get("LOG_FORMAT"),
+ datefmt=app.config.get("LOG_DATEFORMAT"),
handlers=log_handlers,
- force=True
+ force=True,
)
- log_tz = app.config.get('LOG_TZ')
+ log_tz = app.config.get("LOG_TZ")
if log_tz:
from datetime import datetime
import pytz
+
timezone = pytz.timezone(log_tz)
def time_converter(seconds):
@@ -162,24 +164,24 @@ def initialize_extensions(app):
@login_manager.request_loader
def load_user_from_request(request_from_flask_login):
"""Load user based on the request."""
- if request.blueprint not in ['console', 'inner_api']:
+ if request.blueprint not in ["console", "inner_api"]:
return None
# Check if the user_id contains a dot, indicating the old format
- auth_header = request.headers.get('Authorization', '')
+ auth_header = request.headers.get("Authorization", "")
if not auth_header:
- auth_token = request.args.get('_token')
+ auth_token = request.args.get("_token")
if not auth_token:
- raise Unauthorized('Invalid Authorization token.')
+ raise Unauthorized("Invalid Authorization token.")
else:
- if ' ' not in auth_header:
- raise Unauthorized('Invalid Authorization header format. Expected \'Bearer \' format.')
+ if " " not in auth_header:
+ raise Unauthorized("Invalid Authorization header format. Expected 'Bearer ' format.")
auth_scheme, auth_token = auth_header.split(None, 1)
auth_scheme = auth_scheme.lower()
- if auth_scheme != 'bearer':
- raise Unauthorized('Invalid Authorization header format. Expected \'Bearer \' format.')
+ if auth_scheme != "bearer":
+ raise Unauthorized("Invalid Authorization header format. Expected 'Bearer ' format.")
decoded = PassportService().verify(auth_token)
- user_id = decoded.get('user_id')
+ user_id = decoded.get("user_id")
account = AccountService.load_logged_in_account(account_id=user_id, token=auth_token)
if account:
@@ -190,10 +192,11 @@ def load_user_from_request(request_from_flask_login):
@login_manager.unauthorized_handler
def unauthorized_handler():
"""Handle unauthorized requests."""
- return Response(json.dumps({
- 'code': 'unauthorized',
- 'message': "Unauthorized."
- }), status=401, content_type="application/json")
+ return Response(
+ json.dumps({"code": "unauthorized", "message": "Unauthorized."}),
+ status=401,
+ content_type="application/json",
+ )
# register blueprint routers
@@ -204,38 +207,36 @@ def register_blueprints(app):
from controllers.service_api import bp as service_api_bp
from controllers.web import bp as web_bp
- CORS(service_api_bp,
- allow_headers=['Content-Type', 'Authorization', 'X-App-Code'],
- methods=['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS', 'PATCH']
- )
+ CORS(
+ service_api_bp,
+ allow_headers=["Content-Type", "Authorization", "X-App-Code"],
+ methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
+ )
app.register_blueprint(service_api_bp)
- CORS(web_bp,
- resources={
- r"/*": {"origins": app.config['WEB_API_CORS_ALLOW_ORIGINS']}},
- supports_credentials=True,
- allow_headers=['Content-Type', 'Authorization', 'X-App-Code'],
- methods=['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS', 'PATCH'],
- expose_headers=['X-Version', 'X-Env']
- )
+ CORS(
+ web_bp,
+ resources={r"/*": {"origins": app.config["WEB_API_CORS_ALLOW_ORIGINS"]}},
+ supports_credentials=True,
+ allow_headers=["Content-Type", "Authorization", "X-App-Code"],
+ methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
+ expose_headers=["X-Version", "X-Env"],
+ )
app.register_blueprint(web_bp)
- CORS(console_app_bp,
- resources={
- r"/*": {"origins": app.config['CONSOLE_CORS_ALLOW_ORIGINS']}},
- supports_credentials=True,
- allow_headers=['Content-Type', 'Authorization'],
- methods=['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS', 'PATCH'],
- expose_headers=['X-Version', 'X-Env']
- )
+ CORS(
+ console_app_bp,
+ resources={r"/*": {"origins": app.config["CONSOLE_CORS_ALLOW_ORIGINS"]}},
+ supports_credentials=True,
+ allow_headers=["Content-Type", "Authorization"],
+ methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
+ expose_headers=["X-Version", "X-Env"],
+ )
app.register_blueprint(console_app_bp)
- CORS(files_bp,
- allow_headers=['Content-Type'],
- methods=['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS', 'PATCH']
- )
+ CORS(files_bp, allow_headers=["Content-Type"], methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"])
app.register_blueprint(files_bp)
app.register_blueprint(inner_api_bp)
@@ -245,29 +246,29 @@ def register_blueprints(app):
app = create_app()
celery = app.extensions["celery"]
-if app.config.get('TESTING'):
+if app.config.get("TESTING"):
print("App is running in TESTING mode")
@app.after_request
def after_request(response):
"""Add Version headers to the response."""
- response.set_cookie('remember_token', '', expires=0)
- response.headers.add('X-Version', app.config['CURRENT_VERSION'])
- response.headers.add('X-Env', app.config['DEPLOY_ENV'])
+ response.set_cookie("remember_token", "", expires=0)
+ response.headers.add("X-Version", app.config["CURRENT_VERSION"])
+ response.headers.add("X-Env", app.config["DEPLOY_ENV"])
return response
-@app.route('/health')
+@app.route("/health")
def health():
- return Response(json.dumps({
- 'pid': os.getpid(),
- 'status': 'ok',
- 'version': app.config['CURRENT_VERSION']
- }), status=200, content_type="application/json")
+ return Response(
+ json.dumps({"pid": os.getpid(), "status": "ok", "version": app.config["CURRENT_VERSION"]}),
+ status=200,
+ content_type="application/json",
+ )
-@app.route('/threads')
+@app.route("/threads")
def threads():
num_threads = threading.active_count()
threads = threading.enumerate()
@@ -278,32 +279,34 @@ def threads():
thread_id = thread.ident
is_alive = thread.is_alive()
- thread_list.append({
- 'name': thread_name,
- 'id': thread_id,
- 'is_alive': is_alive
- })
+ thread_list.append(
+ {
+ "name": thread_name,
+ "id": thread_id,
+ "is_alive": is_alive,
+ }
+ )
return {
- 'pid': os.getpid(),
- 'thread_num': num_threads,
- 'threads': thread_list
+ "pid": os.getpid(),
+ "thread_num": num_threads,
+ "threads": thread_list,
}
-@app.route('/db-pool-stat')
+@app.route("/db-pool-stat")
def pool_stat():
engine = db.engine
return {
- 'pid': os.getpid(),
- 'pool_size': engine.pool.size(),
- 'checked_in_connections': engine.pool.checkedin(),
- 'checked_out_connections': engine.pool.checkedout(),
- 'overflow_connections': engine.pool.overflow(),
- 'connection_timeout': engine.pool.timeout(),
- 'recycle_time': db.engine.pool._recycle
+ "pid": os.getpid(),
+ "pool_size": engine.pool.size(),
+ "checked_in_connections": engine.pool.checkedin(),
+ "checked_out_connections": engine.pool.checkedout(),
+ "overflow_connections": engine.pool.overflow(),
+ "connection_timeout": engine.pool.timeout(),
+ "recycle_time": db.engine.pool._recycle,
}
-if __name__ == '__main__':
- app.run(host='0.0.0.0', port=5001)
+if __name__ == "__main__":
+ app.run(host="0.0.0.0", port=5001)
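The app.py hunks above are formatting-only, so the debug endpoints should behave exactly as before. A minimal sanity-check sketch using Flask's test client — the test function is hypothetical, but create_app and the /health payload keys come straight from the diff (a configured .env is assumed):

from app import create_app


def test_health_endpoint_unchanged():
    app = create_app()
    client = app.test_client()
    response = client.get("/health")
    assert response.status_code == 200
    payload = response.get_json()
    # keys mirror the json.dumps({...}) body in the reformatted /health route
    assert set(payload) == {"pid", "status", "version"}
    assert payload["status"] == "ok"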
diff --git a/api/commands.py b/api/commands.py
index c7ffb47b512246..3bf8bc0ecc45f2 100644
--- a/api/commands.py
+++ b/api/commands.py
@@ -27,32 +27,29 @@
from services.account_service import RegisterService, TenantService
-@click.command('reset-password', help='Reset the account password.')
-@click.option('--email', prompt=True, help='The email address of the account whose password you need to reset')
-@click.option('--new-password', prompt=True, help='the new password.')
-@click.option('--password-confirm', prompt=True, help='the new password confirm.')
+@click.command("reset-password", help="Reset the account password.")
+@click.option("--email", prompt=True, help="The email address of the account whose password you need to reset")
+@click.option("--new-password", prompt=True, help="the new password.")
+@click.option("--password-confirm", prompt=True, help="the new password confirm.")
def reset_password(email, new_password, password_confirm):
"""
Reset password of owner account
Only available in SELF_HOSTED mode
"""
if str(new_password).strip() != str(password_confirm).strip():
- click.echo(click.style('sorry. The two passwords do not match.', fg='red'))
+ click.echo(click.style("sorry. The two passwords do not match.", fg="red"))
return
- account = db.session.query(Account). \
- filter(Account.email == email). \
- one_or_none()
+ account = db.session.query(Account).filter(Account.email == email).one_or_none()
if not account:
- click.echo(click.style('sorry. the account: [{}] not exist .'.format(email), fg='red'))
+ click.echo(click.style("sorry. the account: [{}] not exist .".format(email), fg="red"))
return
try:
valid_password(new_password)
except:
- click.echo(
- click.style('sorry. The passwords must match {} '.format(password_pattern), fg='red'))
+ click.echo(click.style("sorry. The passwords must match {} ".format(password_pattern), fg="red"))
return
# generate password salt
@@ -65,80 +62,87 @@ def reset_password(email, new_password, password_confirm):
account.password = base64_password_hashed
account.password_salt = base64_salt
db.session.commit()
- click.echo(click.style('Congratulations! Password has been reset.', fg='green'))
+ click.echo(click.style("Congratulations! Password has been reset.", fg="green"))
-@click.command('reset-email', help='Reset the account email.')
-@click.option('--email', prompt=True, help='The old email address of the account whose email you need to reset')
-@click.option('--new-email', prompt=True, help='the new email.')
-@click.option('--email-confirm', prompt=True, help='the new email confirm.')
+@click.command("reset-email", help="Reset the account email.")
+@click.option("--email", prompt=True, help="The old email address of the account whose email you need to reset")
+@click.option("--new-email", prompt=True, help="the new email.")
+@click.option("--email-confirm", prompt=True, help="the new email confirm.")
def reset_email(email, new_email, email_confirm):
"""
Replace account email
:return:
"""
if str(new_email).strip() != str(email_confirm).strip():
- click.echo(click.style('Sorry, new email and confirm email do not match.', fg='red'))
+ click.echo(click.style("Sorry, new email and confirm email do not match.", fg="red"))
return
- account = db.session.query(Account). \
- filter(Account.email == email). \
- one_or_none()
+ account = db.session.query(Account).filter(Account.email == email).one_or_none()
if not account:
- click.echo(click.style('sorry. the account: [{}] not exist .'.format(email), fg='red'))
+ click.echo(click.style("sorry. the account: [{}] not exist .".format(email), fg="red"))
return
try:
email_validate(new_email)
except:
- click.echo(
- click.style('sorry. {} is not a valid email. '.format(email), fg='red'))
+ click.echo(click.style("sorry. {} is not a valid email. ".format(email), fg="red"))
return
account.email = new_email
db.session.commit()
- click.echo(click.style('Congratulations!, email has been reset.', fg='green'))
-
-
-@click.command('reset-encrypt-key-pair', help='Reset the asymmetric key pair of workspace for encrypt LLM credentials. '
- 'After the reset, all LLM credentials will become invalid, '
- 'requiring re-entry.'
- 'Only support SELF_HOSTED mode.')
-@click.confirmation_option(prompt=click.style('Are you sure you want to reset encrypt key pair?'
- ' this operation cannot be rolled back!', fg='red'))
+ click.echo(click.style("Congratulations!, email has been reset.", fg="green"))
+
+
+@click.command(
+ "reset-encrypt-key-pair",
+ help="Reset the asymmetric key pair of workspace for encrypt LLM credentials. "
+ "After the reset, all LLM credentials will become invalid, "
+ "requiring re-entry."
+ "Only support SELF_HOSTED mode.",
+)
+@click.confirmation_option(
+ prompt=click.style(
+ "Are you sure you want to reset encrypt key pair?" " this operation cannot be rolled back!", fg="red"
+ )
+)
def reset_encrypt_key_pair():
"""
Reset the encrypted key pair of workspace for encrypt LLM credentials.
After the reset, all LLM credentials will become invalid, requiring re-entry.
Only support SELF_HOSTED mode.
"""
- if dify_config.EDITION != 'SELF_HOSTED':
- click.echo(click.style('Sorry, only support SELF_HOSTED mode.', fg='red'))
+ if dify_config.EDITION != "SELF_HOSTED":
+ click.echo(click.style("Sorry, only support SELF_HOSTED mode.", fg="red"))
return
tenants = db.session.query(Tenant).all()
for tenant in tenants:
if not tenant:
- click.echo(click.style('Sorry, no workspace found. Please enter /install to initialize.', fg='red'))
+ click.echo(click.style("Sorry, no workspace found. Please enter /install to initialize.", fg="red"))
return
tenant.encrypt_public_key = generate_key_pair(tenant.id)
- db.session.query(Provider).filter(Provider.provider_type == 'custom', Provider.tenant_id == tenant.id).delete()
+ db.session.query(Provider).filter(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete()
db.session.query(ProviderModel).filter(ProviderModel.tenant_id == tenant.id).delete()
db.session.commit()
- click.echo(click.style('Congratulations! '
- 'the asymmetric key pair of workspace {} has been reset.'.format(tenant.id), fg='green'))
+ click.echo(
+ click.style(
+ "Congratulations! " "the asymmetric key pair of workspace {} has been reset.".format(tenant.id),
+ fg="green",
+ )
+ )
-@click.command('vdb-migrate', help='migrate vector db.')
-@click.option('--scope', default='all', prompt=False, help='The scope of vector database to migrate, Default is All.')
+@click.command("vdb-migrate", help="migrate vector db.")
+@click.option("--scope", default="all", prompt=False, help="The scope of vector database to migrate, Default is All.")
def vdb_migrate(scope: str):
- if scope in ['knowledge', 'all']:
+ if scope in ["knowledge", "all"]:
migrate_knowledge_vector_database()
- if scope in ['annotation', 'all']:
+ if scope in ["annotation", "all"]:
migrate_annotation_vector_database()
@@ -146,7 +150,7 @@ def migrate_annotation_vector_database():
"""
Migrate annotation datas to target vector database .
"""
- click.echo(click.style('Start migrate annotation data.', fg='green'))
+ click.echo(click.style("Start migrate annotation data.", fg="green"))
create_count = 0
skipped_count = 0
total_count = 0
@@ -154,98 +158,103 @@ def migrate_annotation_vector_database():
while True:
try:
# get apps info
- apps = db.session.query(App).filter(
- App.status == 'normal'
- ).order_by(App.created_at.desc()).paginate(page=page, per_page=50)
+ apps = (
+ db.session.query(App)
+ .filter(App.status == "normal")
+ .order_by(App.created_at.desc())
+ .paginate(page=page, per_page=50)
+ )
except NotFound:
break
page += 1
for app in apps:
total_count = total_count + 1
- click.echo(f'Processing the {total_count} app {app.id}. '
- + f'{create_count} created, {skipped_count} skipped.')
+ click.echo(
+ f"Processing the {total_count} app {app.id}. " + f"{create_count} created, {skipped_count} skipped."
+ )
try:
- click.echo('Create app annotation index: {}'.format(app.id))
- app_annotation_setting = db.session.query(AppAnnotationSetting).filter(
- AppAnnotationSetting.app_id == app.id
- ).first()
+ click.echo("Create app annotation index: {}".format(app.id))
+ app_annotation_setting = (
+ db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app.id).first()
+ )
if not app_annotation_setting:
skipped_count = skipped_count + 1
- click.echo('App annotation setting is disabled: {}'.format(app.id))
+ click.echo("App annotation setting is disabled: {}".format(app.id))
continue
# get dataset_collection_binding info
- dataset_collection_binding = db.session.query(DatasetCollectionBinding).filter(
- DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id
- ).first()
+ dataset_collection_binding = (
+ db.session.query(DatasetCollectionBinding)
+ .filter(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id)
+ .first()
+ )
if not dataset_collection_binding:
- click.echo('App annotation collection binding is not exist: {}'.format(app.id))
+ click.echo("App annotation collection binding is not exist: {}".format(app.id))
continue
annotations = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app.id).all()
dataset = Dataset(
id=app.id,
tenant_id=app.tenant_id,
- indexing_technique='high_quality',
+ indexing_technique="high_quality",
embedding_model_provider=dataset_collection_binding.provider_name,
embedding_model=dataset_collection_binding.model_name,
- collection_binding_id=dataset_collection_binding.id
+ collection_binding_id=dataset_collection_binding.id,
)
documents = []
if annotations:
for annotation in annotations:
document = Document(
page_content=annotation.question,
- metadata={
- "annotation_id": annotation.id,
- "app_id": app.id,
- "doc_id": annotation.id
- }
+ metadata={"annotation_id": annotation.id, "app_id": app.id, "doc_id": annotation.id},
)
documents.append(document)
- vector = Vector(dataset, attributes=['doc_id', 'annotation_id', 'app_id'])
+ vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"])
click.echo(f"Start to migrate annotation, app_id: {app.id}.")
try:
vector.delete()
- click.echo(
- click.style(f'Successfully delete vector index for app: {app.id}.',
- fg='green'))
+ click.echo(click.style(f"Successfully delete vector index for app: {app.id}.", fg="green"))
except Exception as e:
- click.echo(
- click.style(f'Failed to delete vector index for app {app.id}.',
- fg='red'))
+ click.echo(click.style(f"Failed to delete vector index for app {app.id}.", fg="red"))
raise e
if documents:
try:
- click.echo(click.style(
- f'Start to created vector index with {len(documents)} annotations for app {app.id}.',
- fg='green'))
- vector.create(documents)
click.echo(
- click.style(f'Successfully created vector index for app {app.id}.', fg='green'))
+ click.style(
+ f"Start to created vector index with {len(documents)} annotations for app {app.id}.",
+ fg="green",
+ )
+ )
+ vector.create(documents)
+ click.echo(click.style(f"Successfully created vector index for app {app.id}.", fg="green"))
except Exception as e:
- click.echo(click.style(f'Failed to created vector index for app {app.id}.', fg='red'))
+ click.echo(click.style(f"Failed to created vector index for app {app.id}.", fg="red"))
raise e
- click.echo(f'Successfully migrated app annotation {app.id}.')
+ click.echo(f"Successfully migrated app annotation {app.id}.")
create_count += 1
except Exception as e:
click.echo(
- click.style('Create app annotation index error: {} {}'.format(e.__class__.__name__, str(e)),
- fg='red'))
+ click.style(
+ "Create app annotation index error: {} {}".format(e.__class__.__name__, str(e)), fg="red"
+ )
+ )
continue
click.echo(
- click.style(f'Congratulations! Create {create_count} app annotation indexes, and skipped {skipped_count} apps.',
- fg='green'))
+ click.style(
+ f"Congratulations! Create {create_count} app annotation indexes, and skipped {skipped_count} apps.",
+ fg="green",
+ )
+ )
def migrate_knowledge_vector_database():
"""
Migrate vector database datas to target vector database .
"""
- click.echo(click.style('Start migrate vector db.', fg='green'))
+ click.echo(click.style("Start migrate vector db.", fg="green"))
create_count = 0
skipped_count = 0
total_count = 0
@@ -253,87 +262,77 @@ def migrate_knowledge_vector_database():
page = 1
while True:
try:
- datasets = db.session.query(Dataset).filter(Dataset.indexing_technique == 'high_quality') \
- .order_by(Dataset.created_at.desc()).paginate(page=page, per_page=50)
+ datasets = (
+ db.session.query(Dataset)
+ .filter(Dataset.indexing_technique == "high_quality")
+ .order_by(Dataset.created_at.desc())
+ .paginate(page=page, per_page=50)
+ )
except NotFound:
break
page += 1
for dataset in datasets:
total_count = total_count + 1
- click.echo(f'Processing the {total_count} dataset {dataset.id}. '
- + f'{create_count} created, {skipped_count} skipped.')
+ click.echo(
+ f"Processing the {total_count} dataset {dataset.id}. "
+ + f"{create_count} created, {skipped_count} skipped."
+ )
try:
- click.echo('Create dataset vdb index: {}'.format(dataset.id))
+ click.echo("Create dataset vdb index: {}".format(dataset.id))
if dataset.index_struct_dict:
- if dataset.index_struct_dict['type'] == vector_type:
+ if dataset.index_struct_dict["type"] == vector_type:
skipped_count = skipped_count + 1
continue
- collection_name = ''
+ collection_name = ""
if vector_type == VectorType.WEAVIATE:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
- index_struct_dict = {
- "type": VectorType.WEAVIATE,
- "vector_store": {"class_prefix": collection_name}
- }
+ index_struct_dict = {"type": VectorType.WEAVIATE, "vector_store": {"class_prefix": collection_name}}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.QDRANT:
if dataset.collection_binding_id:
- dataset_collection_binding = db.session.query(DatasetCollectionBinding). \
- filter(DatasetCollectionBinding.id == dataset.collection_binding_id). \
- one_or_none()
+ dataset_collection_binding = (
+ db.session.query(DatasetCollectionBinding)
+ .filter(DatasetCollectionBinding.id == dataset.collection_binding_id)
+ .one_or_none()
+ )
if dataset_collection_binding:
collection_name = dataset_collection_binding.collection_name
else:
- raise ValueError('Dataset Collection Bindings is not exist!')
+ raise ValueError("Dataset Collection Bindings is not exist!")
else:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
- index_struct_dict = {
- "type": VectorType.QDRANT,
- "vector_store": {"class_prefix": collection_name}
- }
+ index_struct_dict = {"type": VectorType.QDRANT, "vector_store": {"class_prefix": collection_name}}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.MILVUS:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
- index_struct_dict = {
- "type": VectorType.MILVUS,
- "vector_store": {"class_prefix": collection_name}
- }
+ index_struct_dict = {"type": VectorType.MILVUS, "vector_store": {"class_prefix": collection_name}}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.RELYT:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
- index_struct_dict = {
- "type": 'relyt',
- "vector_store": {"class_prefix": collection_name}
- }
+ index_struct_dict = {"type": "relyt", "vector_store": {"class_prefix": collection_name}}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.TENCENT:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
- index_struct_dict = {
- "type": VectorType.TENCENT,
- "vector_store": {"class_prefix": collection_name}
- }
+ index_struct_dict = {"type": VectorType.TENCENT, "vector_store": {"class_prefix": collection_name}}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.PGVECTOR:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
- index_struct_dict = {
- "type": VectorType.PGVECTOR,
- "vector_store": {"class_prefix": collection_name}
- }
+ index_struct_dict = {"type": VectorType.PGVECTOR, "vector_store": {"class_prefix": collection_name}}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.OPENSEARCH:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {
"type": VectorType.OPENSEARCH,
- "vector_store": {"class_prefix": collection_name}
+ "vector_store": {"class_prefix": collection_name},
}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.ANALYTICDB:
@@ -341,9 +340,14 @@ def migrate_knowledge_vector_database():
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {
"type": VectorType.ANALYTICDB,
- "vector_store": {"class_prefix": collection_name}
+ "vector_store": {"class_prefix": collection_name},
}
dataset.index_struct = json.dumps(index_struct_dict)
+ elif vector_type == VectorType.ELASTICSEARCH:
+ dataset_id = dataset.id
+ index_name = Dataset.gen_collection_name_by_id(dataset_id)
+ index_struct_dict = {"type": "elasticsearch", "vector_store": {"class_prefix": index_name}}
+ dataset.index_struct = json.dumps(index_struct_dict)
else:
raise ValueError(f"Vector store {vector_type} is not supported.")
@@ -353,29 +357,41 @@ def migrate_knowledge_vector_database():
try:
vector.delete()
click.echo(
- click.style(f'Successfully delete vector index {collection_name} for dataset {dataset.id}.',
- fg='green'))
+ click.style(
+ f"Successfully delete vector index {collection_name} for dataset {dataset.id}.", fg="green"
+ )
+ )
except Exception as e:
click.echo(
- click.style(f'Failed to delete vector index {collection_name} for dataset {dataset.id}.',
- fg='red'))
+ click.style(
+ f"Failed to delete vector index {collection_name} for dataset {dataset.id}.", fg="red"
+ )
+ )
raise e
- dataset_documents = db.session.query(DatasetDocument).filter(
- DatasetDocument.dataset_id == dataset.id,
- DatasetDocument.indexing_status == 'completed',
- DatasetDocument.enabled == True,
- DatasetDocument.archived == False,
- ).all()
+ dataset_documents = (
+ db.session.query(DatasetDocument)
+ .filter(
+ DatasetDocument.dataset_id == dataset.id,
+ DatasetDocument.indexing_status == "completed",
+ DatasetDocument.enabled == True,
+ DatasetDocument.archived == False,
+ )
+ .all()
+ )
documents = []
segments_count = 0
for dataset_document in dataset_documents:
- segments = db.session.query(DocumentSegment).filter(
- DocumentSegment.document_id == dataset_document.id,
- DocumentSegment.status == 'completed',
- DocumentSegment.enabled == True
- ).all()
+ segments = (
+ db.session.query(DocumentSegment)
+ .filter(
+ DocumentSegment.document_id == dataset_document.id,
+ DocumentSegment.status == "completed",
+ DocumentSegment.enabled == True,
+ )
+ .all()
+ )
for segment in segments:
document = Document(
@@ -385,7 +401,7 @@ def migrate_knowledge_vector_database():
"doc_hash": segment.index_node_hash,
"document_id": segment.document_id,
"dataset_id": segment.dataset_id,
- }
+ },
)
documents.append(document)
@@ -393,37 +409,43 @@ def migrate_knowledge_vector_database():
if documents:
try:
- click.echo(click.style(
- f'Start to created vector index with {len(documents)} documents of {segments_count} segments for dataset {dataset.id}.',
- fg='green'))
+ click.echo(
+ click.style(
+ f"Start to created vector index with {len(documents)} documents of {segments_count} segments for dataset {dataset.id}.",
+ fg="green",
+ )
+ )
vector.create(documents)
click.echo(
- click.style(f'Successfully created vector index for dataset {dataset.id}.', fg='green'))
+ click.style(f"Successfully created vector index for dataset {dataset.id}.", fg="green")
+ )
except Exception as e:
- click.echo(click.style(f'Failed to created vector index for dataset {dataset.id}.', fg='red'))
+ click.echo(click.style(f"Failed to created vector index for dataset {dataset.id}.", fg="red"))
raise e
db.session.add(dataset)
db.session.commit()
- click.echo(f'Successfully migrated dataset {dataset.id}.')
+ click.echo(f"Successfully migrated dataset {dataset.id}.")
create_count += 1
except Exception as e:
db.session.rollback()
click.echo(
- click.style('Create dataset index error: {} {}'.format(e.__class__.__name__, str(e)),
- fg='red'))
+ click.style("Create dataset index error: {} {}".format(e.__class__.__name__, str(e)), fg="red")
+ )
continue
click.echo(
- click.style(f'Congratulations! Create {create_count} dataset indexes, and skipped {skipped_count} datasets.',
- fg='green'))
+ click.style(
+ f"Congratulations! Create {create_count} dataset indexes, and skipped {skipped_count} datasets.", fg="green"
+ )
+ )
-@click.command('convert-to-agent-apps', help='Convert Agent Assistant to Agent App.')
+@click.command("convert-to-agent-apps", help="Convert Agent Assistant to Agent App.")
def convert_to_agent_apps():
"""
Convert Agent Assistant to Agent App.
"""
- click.echo(click.style('Start convert to agent apps.', fg='green'))
+ click.echo(click.style("Start convert to agent apps.", fg="green"))
proceeded_app_ids = []
@@ -458,7 +480,7 @@ def convert_to_agent_apps():
break
for app in apps:
- click.echo('Converting app: {}'.format(app.id))
+ click.echo("Converting app: {}".format(app.id))
try:
app.mode = AppMode.AGENT_CHAT.value
@@ -470,137 +492,142 @@ def convert_to_agent_apps():
)
db.session.commit()
- click.echo(click.style('Converted app: {}'.format(app.id), fg='green'))
+ click.echo(click.style("Converted app: {}".format(app.id), fg="green"))
except Exception as e:
- click.echo(
- click.style('Convert app error: {} {}'.format(e.__class__.__name__,
- str(e)), fg='red'))
+ click.echo(click.style("Convert app error: {} {}".format(e.__class__.__name__, str(e)), fg="red"))
- click.echo(click.style('Congratulations! Converted {} agent apps.'.format(len(proceeded_app_ids)), fg='green'))
+ click.echo(click.style("Congratulations! Converted {} agent apps.".format(len(proceeded_app_ids)), fg="green"))
-@click.command('add-qdrant-doc-id-index', help='add qdrant doc_id index.')
-@click.option('--field', default='metadata.doc_id', prompt=False, help='index field , default is metadata.doc_id.')
+@click.command("add-qdrant-doc-id-index", help="add qdrant doc_id index.")
+@click.option("--field", default="metadata.doc_id", prompt=False, help="index field , default is metadata.doc_id.")
def add_qdrant_doc_id_index(field: str):
- click.echo(click.style('Start add qdrant doc_id index.', fg='green'))
+ click.echo(click.style("Start add qdrant doc_id index.", fg="green"))
vector_type = dify_config.VECTOR_STORE
if vector_type != "qdrant":
- click.echo(click.style('Sorry, only support qdrant vector store.', fg='red'))
+ click.echo(click.style("Sorry, only support qdrant vector store.", fg="red"))
return
create_count = 0
try:
bindings = db.session.query(DatasetCollectionBinding).all()
if not bindings:
- click.echo(click.style('Sorry, no dataset collection bindings found.', fg='red'))
+ click.echo(click.style("Sorry, no dataset collection bindings found.", fg="red"))
return
import qdrant_client
from qdrant_client.http.exceptions import UnexpectedResponse
from qdrant_client.http.models import PayloadSchemaType
from core.rag.datasource.vdb.qdrant.qdrant_vector import QdrantConfig
+
for binding in bindings:
if dify_config.QDRANT_URL is None:
- raise ValueError('Qdrant url is required.')
+ raise ValueError("Qdrant url is required.")
qdrant_config = QdrantConfig(
endpoint=dify_config.QDRANT_URL,
api_key=dify_config.QDRANT_API_KEY,
root_path=current_app.root_path,
timeout=dify_config.QDRANT_CLIENT_TIMEOUT,
grpc_port=dify_config.QDRANT_GRPC_PORT,
- prefer_grpc=dify_config.QDRANT_GRPC_ENABLED
+ prefer_grpc=dify_config.QDRANT_GRPC_ENABLED,
)
try:
client = qdrant_client.QdrantClient(**qdrant_config.to_qdrant_params())
# create payload index
- client.create_payload_index(binding.collection_name, field,
- field_schema=PayloadSchemaType.KEYWORD)
+ client.create_payload_index(binding.collection_name, field, field_schema=PayloadSchemaType.KEYWORD)
create_count += 1
except UnexpectedResponse as e:
# Collection does not exist, so return
if e.status_code == 404:
- click.echo(click.style(f'Collection not found, collection_name:{binding.collection_name}.', fg='red'))
+ click.echo(
+ click.style(f"Collection not found, collection_name:{binding.collection_name}.", fg="red")
+ )
continue
# Some other error occurred, so re-raise the exception
else:
- click.echo(click.style(f'Failed to create qdrant index, collection_name:{binding.collection_name}.', fg='red'))
+ click.echo(
+ click.style(
+ f"Failed to create qdrant index, collection_name:{binding.collection_name}.", fg="red"
+ )
+ )
except Exception as e:
- click.echo(click.style('Failed to create qdrant client.', fg='red'))
+ click.echo(click.style("Failed to create qdrant client.", fg="red"))
- click.echo(
- click.style(f'Congratulations! Create {create_count} collection indexes.',
- fg='green'))
+ click.echo(click.style(f"Congratulations! Create {create_count} collection indexes.", fg="green"))
-@click.command('create-tenant', help='Create account and tenant.')
-@click.option('--email', prompt=True, help='The email address of the tenant account.')
-@click.option('--language', prompt=True, help='Account language, default: en-US.')
-def create_tenant(email: str, language: Optional[str] = None):
+@click.command("create-tenant", help="Create account and tenant.")
+@click.option("--email", prompt=True, help="The email address of the tenant account.")
+@click.option("--name", prompt=True, help="The workspace name of the tenant account.")
+@click.option("--language", prompt=True, help="Account language, default: en-US.")
+def create_tenant(email: str, language: Optional[str] = None, name: Optional[str] = None):
"""
Create tenant account
"""
if not email:
- click.echo(click.style('Sorry, email is required.', fg='red'))
+ click.echo(click.style("Sorry, email is required.", fg="red"))
return
# Create account
email = email.strip()
- if '@' not in email:
- click.echo(click.style('Sorry, invalid email address.', fg='red'))
+ if "@" not in email:
+ click.echo(click.style("Sorry, invalid email address.", fg="red"))
return
- account_name = email.split('@')[0]
+ account_name = email.split("@")[0]
if language not in languages:
- language = 'en-US'
+ language = "en-US"
+
+ name = name.strip()
# generate random password
new_password = secrets.token_urlsafe(16)
# register account
- account = RegisterService.register(
- email=email,
- name=account_name,
- password=new_password,
- language=language
- )
+ account = RegisterService.register(email=email, name=account_name, password=new_password, language=language)
- TenantService.create_owner_tenant_if_not_exist(account)
+ TenantService.create_owner_tenant_if_not_exist(account, name)
- click.echo(click.style('Congratulations! Account and tenant created.\n'
- 'Account: {}\nPassword: {}'.format(email, new_password), fg='green'))
+ click.echo(
+ click.style(
+ "Congratulations! Account and tenant created.\n" "Account: {}\nPassword: {}".format(email, new_password),
+ fg="green",
+ )
+ )
-@click.command('upgrade-db', help='upgrade the database')
+@click.command("upgrade-db", help="upgrade the database")
def upgrade_db():
- click.echo('Preparing database migration...')
- lock = redis_client.lock(name='db_upgrade_lock', timeout=60)
+ click.echo("Preparing database migration...")
+ lock = redis_client.lock(name="db_upgrade_lock", timeout=60)
if lock.acquire(blocking=False):
try:
- click.echo(click.style('Start database migration.', fg='green'))
+ click.echo(click.style("Start database migration.", fg="green"))
# run db migration
import flask_migrate
+
flask_migrate.upgrade()
- click.echo(click.style('Database migration successful!', fg='green'))
+ click.echo(click.style("Database migration successful!", fg="green"))
except Exception as e:
- logging.exception(f'Database migration failed, error: {e}')
+ logging.exception(f"Database migration failed, error: {e}")
finally:
lock.release()
else:
- click.echo('Database migration skipped')
+ click.echo("Database migration skipped")
-@click.command('fix-app-site-missing', help='Fix app related site missing issue.')
+@click.command("fix-app-site-missing", help="Fix app related site missing issue.")
def fix_app_site_missing():
"""
Fix app related site missing issue.
"""
- click.echo(click.style('Start fix app related site missing issue.', fg='green'))
+ click.echo(click.style("Start fix app related site missing issue.", fg="green"))
failed_app_ids = []
while True:
@@ -631,15 +658,14 @@ def fix_app_site_missing():
app_was_created.send(app, account=account)
except Exception as e:
failed_app_ids.append(app_id)
- click.echo(click.style('Fix app {} related site missing issue failed!'.format(app_id), fg='red'))
- logging.exception(f'Fix app related site missing issue failed, error: {e}')
+ click.echo(click.style("Fix app {} related site missing issue failed!".format(app_id), fg="red"))
+ logging.exception(f"Fix app related site missing issue failed, error: {e}")
continue
if not processed_count:
break
-
- click.echo(click.style('Congratulations! Fix app related site missing issue successful!', fg='green'))
+ click.echo(click.style("Congratulations! Fix app related site missing issue successful!", fg="green"))
def register_commands(app):
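Besides the reformatting, the commands.py hunk adds a required --name option to create-tenant and forwards it to TenantService.create_owner_tenant_if_not_exist. A hedged sketch of driving the updated command with Click's test runner — the email, workspace name, and language are placeholder values, and a configured database and application context are assumed:

from click.testing import CliRunner

from commands import create_tenant

runner = CliRunner()
result = runner.invoke(
    create_tenant,
    ["--email", "owner@example.com", "--name", "Example Workspace", "--language", "en-US"],
)
# on success the command echoes the account email and a freshly generated password
print(result.output)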
diff --git a/api/configs/__init__.py b/api/configs/__init__.py
index c0e28c34e1e1ea..3a172601c96382 100644
--- a/api/configs/__init__.py
+++ b/api/configs/__init__.py
@@ -1,3 +1,3 @@
from .app_config import DifyConfig
-dify_config = DifyConfig()
\ No newline at end of file
+dify_config = DifyConfig()
diff --git a/api/configs/app_config.py b/api/configs/app_config.py
index a5a4fc788d0d19..61de73c8689f8b 100644
--- a/api/configs/app_config.py
+++ b/api/configs/app_config.py
@@ -1,4 +1,3 @@
-from pydantic import Field, computed_field
from pydantic_settings import SettingsConfigDict
from configs.deploy import DeploymentConfig
@@ -12,58 +11,28 @@
class DifyConfig(
# Packaging info
PackagingInfo,
-
# Deployment configs
DeploymentConfig,
-
# Feature configs
FeatureConfig,
-
# Middleware configs
MiddlewareConfig,
-
# Extra service configs
ExtraServiceConfig,
-
# Enterprise feature configs
# **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
EnterpriseFeatureConfig,
):
- DEBUG: bool = Field(default=False, description='whether to enable debug mode.')
-
model_config = SettingsConfigDict(
# read from dotenv format config file
- env_file='.env',
- env_file_encoding='utf-8',
+ env_file=".env",
+ env_file_encoding="utf-8",
frozen=True,
-
# ignore extra attributes
- extra='ignore',
+ extra="ignore",
)
- CODE_MAX_NUMBER: int = 9223372036854775807
- CODE_MIN_NUMBER: int = -9223372036854775808
- CODE_MAX_STRING_LENGTH: int = 80000
- CODE_MAX_STRING_ARRAY_LENGTH: int = 30
- CODE_MAX_OBJECT_ARRAY_LENGTH: int = 30
- CODE_MAX_NUMBER_ARRAY_LENGTH: int = 1000
-
- HTTP_REQUEST_MAX_CONNECT_TIMEOUT: int = 300
- HTTP_REQUEST_MAX_READ_TIMEOUT: int = 600
- HTTP_REQUEST_MAX_WRITE_TIMEOUT: int = 600
- HTTP_REQUEST_NODE_MAX_BINARY_SIZE: int = 1024 * 1024 * 10
-
- @computed_field
- def HTTP_REQUEST_NODE_READABLE_MAX_BINARY_SIZE(self) -> str:
- return f'{self.HTTP_REQUEST_NODE_MAX_BINARY_SIZE / 1024 / 1024:.2f}MB'
-
- HTTP_REQUEST_NODE_MAX_TEXT_SIZE: int = 1024 * 1024
-
- @computed_field
- def HTTP_REQUEST_NODE_READABLE_MAX_TEXT_SIZE(self) -> str:
- return f'{self.HTTP_REQUEST_NODE_MAX_TEXT_SIZE / 1024 / 1024:.2f}MB'
-
- SSRF_PROXY_HTTP_URL: str | None = None
- SSRF_PROXY_HTTPS_URL: str | None = None
-
- MODERATION_BUFFER_SIZE: int = Field(default=300, description='The buffer size for moderation.')
+    # Before adding any config,
+    # please consider placing it in the proper existing or new config group
+    # for better readability and maintainability.
+    # Thank you for your attention and consideration.
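The fields removed from DifyConfig above are not dropped; they reappear below in their proper config groups (DEBUG in DeploymentConfig, the CODE_* limits in CodeExecutionSandboxConfig, the HTTP request timeouts and SSRF proxy URLs in HttpConfig, the moderation buffer in ModerationConfig). Since DifyConfig still inherits every group, callers keep reading flat attributes. A small illustrative sketch, assuming a .env that does not override the defaults shown in the hunks below:

from configs import dify_config

# values now live in their feature/deploy config groups but remain flat attributes
print(dify_config.DEBUG)                             # False
print(dify_config.CODE_MAX_NUMBER)                   # 9223372036854775807
print(dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT)  # 10 (was 300 before this change)
print(dify_config.MODERATION_BUFFER_SIZE)            # 300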
diff --git a/api/configs/deploy/__init__.py b/api/configs/deploy/__init__.py
index 219b315784323e..10271483c46697 100644
--- a/api/configs/deploy/__init__.py
+++ b/api/configs/deploy/__init__.py
@@ -6,22 +6,28 @@ class DeploymentConfig(BaseSettings):
"""
Deployment configs
"""
+
APPLICATION_NAME: str = Field(
- description='application name',
- default='langgenius/dify',
+ description="application name",
+ default="langgenius/dify",
+ )
+
+ DEBUG: bool = Field(
+ description="whether to enable debug mode.",
+ default=False,
)
TESTING: bool = Field(
- description='',
+ description="",
default=False,
)
EDITION: str = Field(
- description='deployment edition',
- default='SELF_HOSTED',
+ description="deployment edition",
+ default="SELF_HOSTED",
)
DEPLOY_ENV: str = Field(
- description='deployment environment, default to PRODUCTION.',
- default='PRODUCTION',
+ description="deployment environment, default to PRODUCTION.",
+ default="PRODUCTION",
)
diff --git a/api/configs/enterprise/__init__.py b/api/configs/enterprise/__init__.py
index b5d884e10e7b4d..c661593a44264d 100644
--- a/api/configs/enterprise/__init__.py
+++ b/api/configs/enterprise/__init__.py
@@ -7,13 +7,14 @@ class EnterpriseFeatureConfig(BaseSettings):
Enterprise feature configs.
**Before using, please contact business@dify.ai by email to inquire about licensing matters.**
"""
+
ENTERPRISE_ENABLED: bool = Field(
- description='whether to enable enterprise features.'
- 'Before using, please contact business@dify.ai by email to inquire about licensing matters.',
+ description="whether to enable enterprise features."
+ "Before using, please contact business@dify.ai by email to inquire about licensing matters.",
default=False,
)
CAN_REPLACE_LOGO: bool = Field(
- description='whether to allow replacing enterprise logo.',
+ description="whether to allow replacing enterprise logo.",
default=False,
)
diff --git a/api/configs/extra/notion_config.py b/api/configs/extra/notion_config.py
index b77e8adaaeba52..bd1268fa4565f8 100644
--- a/api/configs/extra/notion_config.py
+++ b/api/configs/extra/notion_config.py
@@ -8,27 +8,28 @@ class NotionConfig(BaseSettings):
"""
Notion integration configs
"""
+
NOTION_CLIENT_ID: Optional[str] = Field(
- description='Notion client ID',
+ description="Notion client ID",
default=None,
)
NOTION_CLIENT_SECRET: Optional[str] = Field(
- description='Notion client secret key',
+ description="Notion client secret key",
default=None,
)
NOTION_INTEGRATION_TYPE: Optional[str] = Field(
- description='Notion integration type, default to None, available values: internal.',
+ description="Notion integration type, default to None, available values: internal.",
default=None,
)
NOTION_INTERNAL_SECRET: Optional[str] = Field(
- description='Notion internal secret key',
+ description="Notion internal secret key",
default=None,
)
NOTION_INTEGRATION_TOKEN: Optional[str] = Field(
- description='Notion integration token',
+ description="Notion integration token",
default=None,
)
diff --git a/api/configs/extra/sentry_config.py b/api/configs/extra/sentry_config.py
index e6517f730a577a..ea9ea60ffbd6c7 100644
--- a/api/configs/extra/sentry_config.py
+++ b/api/configs/extra/sentry_config.py
@@ -8,17 +8,18 @@ class SentryConfig(BaseSettings):
"""
Sentry configs
"""
+
SENTRY_DSN: Optional[str] = Field(
- description='Sentry DSN',
+ description="Sentry DSN",
default=None,
)
SENTRY_TRACES_SAMPLE_RATE: NonNegativeFloat = Field(
- description='Sentry trace sample rate',
+ description="Sentry trace sample rate",
default=1.0,
)
SENTRY_PROFILES_SAMPLE_RATE: NonNegativeFloat = Field(
- description='Sentry profiles sample rate',
+ description="Sentry profiles sample rate",
default=1.0,
)
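The Sentry fields only change quote style here. For context, they are typically consumed by an extension that initializes the SDK roughly as sketched below; this wiring is an assumption for illustration, not part of the diff:

import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration

from configs import dify_config

if dify_config.SENTRY_DSN:
    # only enable Sentry when a DSN is configured
    sentry_sdk.init(
        dsn=dify_config.SENTRY_DSN,
        integrations=[FlaskIntegration()],
        traces_sample_rate=dify_config.SENTRY_TRACES_SAMPLE_RATE,
        profiles_sample_rate=dify_config.SENTRY_PROFILES_SAMPLE_RATE,
    )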
diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py
index 369b25d788a440..303bce2aa5050c 100644
--- a/api/configs/feature/__init__.py
+++ b/api/configs/feature/__init__.py
@@ -1,6 +1,6 @@
-from typing import Optional
+from typing import Annotated, Optional
-from pydantic import AliasChoices, Field, NonNegativeInt, PositiveInt, computed_field
+from pydantic import AliasChoices, Field, HttpUrl, NegativeInt, NonNegativeInt, PositiveInt, computed_field
from pydantic_settings import BaseSettings
from configs.feature.hosted_service import HostedServiceConfig
@@ -10,16 +10,17 @@ class SecurityConfig(BaseSettings):
"""
Secret Key configs
"""
+
SECRET_KEY: Optional[str] = Field(
- description='Your App secret key will be used for securely signing the session cookie'
- 'Make sure you are changing this key for your deployment with a strong key.'
- 'You can generate a strong key using `openssl rand -base64 42`.'
- 'Alternatively you can set it with `SECRET_KEY` environment variable.',
+ description="Your App secret key will be used for securely signing the session cookie"
+ "Make sure you are changing this key for your deployment with a strong key."
+ "You can generate a strong key using `openssl rand -base64 42`."
+ "Alternatively you can set it with `SECRET_KEY` environment variable.",
default=None,
)
RESET_PASSWORD_TOKEN_EXPIRY_HOURS: PositiveInt = Field(
- description='Expiry time in hours for reset token',
+ description="Expiry time in hours for reset token",
default=24,
)
@@ -28,12 +29,13 @@ class AppExecutionConfig(BaseSettings):
"""
App Execution configs
"""
+
APP_MAX_EXECUTION_TIME: PositiveInt = Field(
- description='execution timeout in seconds for app execution',
+ description="execution timeout in seconds for app execution",
default=1200,
)
APP_MAX_ACTIVE_REQUESTS: NonNegativeInt = Field(
- description='max active request per app, 0 means unlimited',
+ description="max active request per app, 0 means unlimited",
default=0,
)
@@ -42,14 +44,70 @@ class CodeExecutionSandboxConfig(BaseSettings):
"""
Code Execution Sandbox configs
"""
- CODE_EXECUTION_ENDPOINT: str = Field(
- description='endpoint URL of code execution servcie',
- default='http://sandbox:8194',
+
+ CODE_EXECUTION_ENDPOINT: HttpUrl = Field(
+ description="endpoint URL of code execution servcie",
+ default="http://sandbox:8194",
)
CODE_EXECUTION_API_KEY: str = Field(
- description='API key for code execution service',
- default='dify-sandbox',
+ description="API key for code execution service",
+ default="dify-sandbox",
+ )
+
+ CODE_EXECUTION_CONNECT_TIMEOUT: Optional[float] = Field(
+ description="connect timeout in seconds for code execution request",
+ default=10.0,
+ )
+
+ CODE_EXECUTION_READ_TIMEOUT: Optional[float] = Field(
+ description="read timeout in seconds for code execution request",
+ default=60.0,
+ )
+
+ CODE_EXECUTION_WRITE_TIMEOUT: Optional[float] = Field(
+ description="write timeout in seconds for code execution request",
+ default=10.0,
+ )
+
+ CODE_MAX_NUMBER: PositiveInt = Field(
+ description="max depth for code execution",
+ default=9223372036854775807,
+ )
+
+ CODE_MIN_NUMBER: NegativeInt = Field(
+ description="",
+ default=-9223372036854775807,
+ )
+
+ CODE_MAX_DEPTH: PositiveInt = Field(
+ description="max depth for code execution",
+ default=5,
+ )
+
+ CODE_MAX_PRECISION: PositiveInt = Field(
+ description="max precision digits for float type in code execution",
+ default=20,
+ )
+
+ CODE_MAX_STRING_LENGTH: PositiveInt = Field(
+ description="max string length for code execution",
+ default=80000,
+ )
+
+ CODE_MAX_STRING_ARRAY_LENGTH: PositiveInt = Field(
+ description="",
+ default=30,
+ )
+
+ CODE_MAX_OBJECT_ARRAY_LENGTH: PositiveInt = Field(
+ description="",
+ default=30,
+ )
+
+ CODE_MAX_NUMBER_ARRAY_LENGTH: PositiveInt = Field(
+ description="",
+ default=1000,
)
@@ -57,28 +115,27 @@ class EndpointConfig(BaseSettings):
"""
Module URL configs
"""
+
CONSOLE_API_URL: str = Field(
- description='The backend URL prefix of the console API.'
- 'used to concatenate the login authorization callback or notion integration callback.',
- default='',
+ description="The backend URL prefix of the console API."
+ "used to concatenate the login authorization callback or notion integration callback.",
+ default="",
)
CONSOLE_WEB_URL: str = Field(
- description='The front-end URL prefix of the console web.'
- 'used to concatenate some front-end addresses and for CORS configuration use.',
- default='',
+ description="The front-end URL prefix of the console web."
+ "used to concatenate some front-end addresses and for CORS configuration use.",
+ default="",
)
SERVICE_API_URL: str = Field(
- description='Service API Url prefix.'
- 'used to display Service API Base Url to the front-end.',
- default='',
+ description="Service API Url prefix." "used to display Service API Base Url to the front-end.",
+ default="",
)
APP_WEB_URL: str = Field(
- description='WebApp Url prefix.'
- 'used to display WebAPP API Base Url to the front-end.',
- default='',
+ description="WebApp Url prefix." "used to display WebAPP API Base Url to the front-end.",
+ default="",
)
@@ -86,17 +143,18 @@ class FileAccessConfig(BaseSettings):
"""
File Access configs
"""
+
FILES_URL: str = Field(
- description='File preview or download Url prefix.'
- ' used to display File preview or download Url to the front-end or as Multi-model inputs;'
- 'Url is signed and has expiration time.',
- validation_alias=AliasChoices('FILES_URL', 'CONSOLE_API_URL'),
+ description="File preview or download Url prefix."
+ " used to display File preview or download Url to the front-end or as Multi-model inputs;"
+ "Url is signed and has expiration time.",
+ validation_alias=AliasChoices("FILES_URL", "CONSOLE_API_URL"),
alias_priority=1,
- default='',
+ default="",
)
FILES_ACCESS_TIMEOUT: int = Field(
- description='timeout in seconds for file accessing',
+ description="timeout in seconds for file accessing",
default=300,
)
@@ -105,23 +163,24 @@ class FileUploadConfig(BaseSettings):
"""
File Uploading configs
"""
+
UPLOAD_FILE_SIZE_LIMIT: NonNegativeInt = Field(
- description='size limit in Megabytes for uploading files',
+ description="size limit in Megabytes for uploading files",
default=15,
)
UPLOAD_FILE_BATCH_LIMIT: NonNegativeInt = Field(
- description='batch size limit for uploading files',
+ description="batch size limit for uploading files",
default=5,
)
UPLOAD_IMAGE_FILE_SIZE_LIMIT: NonNegativeInt = Field(
- description='image file size limit in Megabytes for uploading files',
+ description="image file size limit in Megabytes for uploading files",
default=10,
)
BATCH_UPLOAD_LIMIT: NonNegativeInt = Field(
- description='', # todo: to be clarified
+ description="", # todo: to be clarified
default=20,
)
@@ -130,45 +189,79 @@ class HttpConfig(BaseSettings):
"""
HTTP configs
"""
+
API_COMPRESSION_ENABLED: bool = Field(
- description='whether to enable HTTP response compression of gzip',
+ description="whether to enable HTTP response compression of gzip",
default=False,
)
inner_CONSOLE_CORS_ALLOW_ORIGINS: str = Field(
- description='',
- validation_alias=AliasChoices('CONSOLE_CORS_ALLOW_ORIGINS', 'CONSOLE_WEB_URL'),
- default='',
+ description="",
+ validation_alias=AliasChoices("CONSOLE_CORS_ALLOW_ORIGINS", "CONSOLE_WEB_URL"),
+ default="",
)
@computed_field
@property
def CONSOLE_CORS_ALLOW_ORIGINS(self) -> list[str]:
- return self.inner_CONSOLE_CORS_ALLOW_ORIGINS.split(',')
+ return self.inner_CONSOLE_CORS_ALLOW_ORIGINS.split(",")
inner_WEB_API_CORS_ALLOW_ORIGINS: str = Field(
- description='',
- validation_alias=AliasChoices('WEB_API_CORS_ALLOW_ORIGINS'),
- default='*',
+ description="",
+ validation_alias=AliasChoices("WEB_API_CORS_ALLOW_ORIGINS"),
+ default="*",
)
@computed_field
@property
def WEB_API_CORS_ALLOW_ORIGINS(self) -> list[str]:
- return self.inner_WEB_API_CORS_ALLOW_ORIGINS.split(',')
+ return self.inner_WEB_API_CORS_ALLOW_ORIGINS.split(",")
+
+ HTTP_REQUEST_MAX_CONNECT_TIMEOUT: Annotated[
+ PositiveInt, Field(ge=10, description="connect timeout in seconds for HTTP request")
+ ] = 10
+
+ HTTP_REQUEST_MAX_READ_TIMEOUT: Annotated[
+ PositiveInt, Field(ge=60, description="read timeout in seconds for HTTP request")
+ ] = 60
+
+ HTTP_REQUEST_MAX_WRITE_TIMEOUT: Annotated[
+ PositiveInt, Field(ge=10, description="read timeout in seconds for HTTP request")
+ ] = 20
+
+ HTTP_REQUEST_NODE_MAX_BINARY_SIZE: PositiveInt = Field(
+ description="",
+ default=10 * 1024 * 1024,
+ )
+
+ HTTP_REQUEST_NODE_MAX_TEXT_SIZE: PositiveInt = Field(
+ description="",
+ default=1 * 1024 * 1024,
+ )
+
+ SSRF_PROXY_HTTP_URL: Optional[str] = Field(
+ description="HTTP URL for SSRF proxy",
+ default=None,
+ )
+
+ SSRF_PROXY_HTTPS_URL: Optional[str] = Field(
+ description="HTTPS URL for SSRF proxy",
+ default=None,
+ )
class InnerAPIConfig(BaseSettings):
"""
Inner API configs
"""
+
INNER_API: bool = Field(
- description='whether to enable the inner API',
+ description="whether to enable the inner API",
default=False,
)
INNER_API_KEY: Optional[str] = Field(
- description='The inner API key is used to authenticate the inner API',
+ description="The inner API key is used to authenticate the inner API",
default=None,
)
@@ -179,28 +272,27 @@ class LoggingConfig(BaseSettings):
"""
LOG_LEVEL: str = Field(
- description='Log output level, default to INFO.'
- 'It is recommended to set it to ERROR for production.',
- default='INFO',
+ description="Log output level, default to INFO." "It is recommended to set it to ERROR for production.",
+ default="INFO",
)
LOG_FILE: Optional[str] = Field(
- description='logging output file path',
+ description="logging output file path",
default=None,
)
LOG_FORMAT: str = Field(
- description='log format',
- default='%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s',
+ description="log format",
+ default="%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s",
)
LOG_DATEFORMAT: Optional[str] = Field(
- description='log date format',
+ description="log date format",
default=None,
)
LOG_TZ: Optional[str] = Field(
- description='specify log timezone, eg: America/New_York',
+ description="specify log timezone, eg: America/New_York",
default=None,
)
@@ -209,8 +301,9 @@ class ModelLoadBalanceConfig(BaseSettings):
"""
Model load balance configs
"""
+
MODEL_LB_ENABLED: bool = Field(
- description='whether to enable model load balancing',
+ description="whether to enable model load balancing",
default=False,
)
@@ -219,8 +312,9 @@ class BillingConfig(BaseSettings):
"""
Platform Billing Configurations
"""
+
BILLING_ENABLED: bool = Field(
- description='whether to enable billing',
+ description="whether to enable billing",
default=False,
)
@@ -229,9 +323,10 @@ class UpdateConfig(BaseSettings):
"""
Update configs
"""
+
CHECK_UPDATE_URL: str = Field(
- description='url for checking updates',
- default='https://updates.dify.ai',
+ description="url for checking updates",
+ default="https://updates.dify.ai",
)
@@ -241,47 +336,53 @@ class WorkflowConfig(BaseSettings):
"""
WORKFLOW_MAX_EXECUTION_STEPS: PositiveInt = Field(
- description='max execution steps in single workflow execution',
+ description="max execution steps in single workflow execution",
default=500,
)
WORKFLOW_MAX_EXECUTION_TIME: PositiveInt = Field(
- description='max execution time in seconds in single workflow execution',
+ description="max execution time in seconds in single workflow execution",
default=1200,
)
WORKFLOW_CALL_MAX_DEPTH: PositiveInt = Field(
- description='max depth of calling in single workflow execution',
+ description="max depth of calling in single workflow execution",
default=5,
)
+ MAX_VARIABLE_SIZE: PositiveInt = Field(
+ description="The maximum size in bytes of a variable. default to 5KB.",
+ default=5 * 1024,
+ )
+
class OAuthConfig(BaseSettings):
"""
oauth configs
"""
+
OAUTH_REDIRECT_PATH: str = Field(
- description='redirect path for OAuth',
- default='/console/api/oauth/authorize',
+ description="redirect path for OAuth",
+ default="/console/api/oauth/authorize",
)
GITHUB_CLIENT_ID: Optional[str] = Field(
- description='GitHub client id for OAuth',
+ description="GitHub client id for OAuth",
default=None,
)
GITHUB_CLIENT_SECRET: Optional[str] = Field(
- description='GitHub client secret key for OAuth',
+ description="GitHub client secret key for OAuth",
default=None,
)
GOOGLE_CLIENT_ID: Optional[str] = Field(
- description='Google client id for OAuth',
+ description="Google client id for OAuth",
default=None,
)
GOOGLE_CLIENT_SECRET: Optional[str] = Field(
- description='Google client secret key for OAuth',
+ description="Google client secret key for OAuth",
default=None,
)
@@ -291,9 +392,8 @@ class ModerationConfig(BaseSettings):
Moderation in app configs.
"""
- # todo: to be clarified in usage and unit
- OUTPUT_MODERATION_BUFFER_SIZE: PositiveInt = Field(
- description='buffer size for moderation',
+ MODERATION_BUFFER_SIZE: PositiveInt = Field(
+ description="buffer size for moderation",
default=300,
)
@@ -304,7 +404,7 @@ class ToolConfig(BaseSettings):
"""
TOOL_ICON_CACHE_MAX_AGE: PositiveInt = Field(
- description='max age in seconds for tool icon caching',
+ description="max age in seconds for tool icon caching",
default=3600,
)
@@ -315,52 +415,52 @@ class MailConfig(BaseSettings):
"""
MAIL_TYPE: Optional[str] = Field(
- description='Mail provider type name, default to None, availabile values are `smtp` and `resend`.',
+ description="Mail provider type name, default to None, availabile values are `smtp` and `resend`.",
default=None,
)
MAIL_DEFAULT_SEND_FROM: Optional[str] = Field(
- description='default email address for sending from ',
+ description="default email address for sending from ",
default=None,
)
RESEND_API_KEY: Optional[str] = Field(
- description='API key for Resend',
+ description="API key for Resend",
default=None,
)
RESEND_API_URL: Optional[str] = Field(
- description='API URL for Resend',
+ description="API URL for Resend",
default=None,
)
SMTP_SERVER: Optional[str] = Field(
- description='smtp server host',
+ description="smtp server host",
default=None,
)
SMTP_PORT: Optional[int] = Field(
- description='smtp server port',
+ description="smtp server port",
default=465,
)
SMTP_USERNAME: Optional[str] = Field(
- description='smtp server username',
+ description="smtp server username",
default=None,
)
SMTP_PASSWORD: Optional[str] = Field(
- description='smtp server password',
+ description="smtp server password",
default=None,
)
SMTP_USE_TLS: bool = Field(
- description='whether to use TLS connection to smtp server',
+ description="whether to use TLS connection to smtp server",
default=False,
)
SMTP_OPPORTUNISTIC_TLS: bool = Field(
- description='whether to use opportunistic TLS connection to smtp server',
+ description="whether to use opportunistic TLS connection to smtp server",
default=False,
)
@@ -371,22 +471,22 @@ class RagEtlConfig(BaseSettings):
"""
ETL_TYPE: str = Field(
- description='RAG ETL type name, default to `dify`, available values are `dify` and `Unstructured`. ',
- default='dify',
+ description="RAG ETL type name, default to `dify`, available values are `dify` and `Unstructured`. ",
+ default="dify",
)
KEYWORD_DATA_SOURCE_TYPE: str = Field(
- description='source type for keyword data, default to `database`, available values are `database` .',
- default='database',
+ description="source type for keyword data, default to `database`, available values are `database` .",
+ default="database",
)
UNSTRUCTURED_API_URL: Optional[str] = Field(
- description='API URL for Unstructured',
+ description="API URL for Unstructured",
default=None,
)
UNSTRUCTURED_API_KEY: Optional[str] = Field(
- description='API key for Unstructured',
+ description="API key for Unstructured",
default=None,
)
@@ -397,22 +497,23 @@ class DataSetConfig(BaseSettings):
"""
CLEAN_DAY_SETTING: PositiveInt = Field(
- description='interval in days for cleaning up dataset',
+ description="interval in days for cleaning up dataset",
default=30,
)
DATASET_OPERATOR_ENABLED: bool = Field(
- description='whether to enable dataset operator',
+ description="whether to enable dataset operator",
default=False,
)
+
class WorkspaceConfig(BaseSettings):
"""
Workspace configs
"""
INVITE_EXPIRY_HOURS: PositiveInt = Field(
- description='workspaces invitation expiration in hours',
+ description="workspaces invitation expiration in hours",
default=72,
)
@@ -423,25 +524,81 @@ class IndexingConfig(BaseSettings):
"""
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: PositiveInt = Field(
- description='max segmentation token length for indexing',
+ description="max segmentation token length for indexing",
default=1000,
)
class ImageFormatConfig(BaseSettings):
MULTIMODAL_SEND_IMAGE_FORMAT: str = Field(
- description='multi model send image format, support base64, url, default is base64',
- default='base64',
+ description="multi model send image format, support base64, url, default is base64",
+ default="base64",
)
class CeleryBeatConfig(BaseSettings):
CELERY_BEAT_SCHEDULER_TIME: int = Field(
- description='the time of the celery scheduler, default to 1 day',
+ description="the time of the celery scheduler, default to 1 day",
default=1,
)
+class PositionConfig(BaseSettings):
+ POSITION_PROVIDER_PINS: str = Field(
+ description="The heads of model providers",
+ default="",
+ )
+
+ POSITION_PROVIDER_INCLUDES: str = Field(
+ description="The included model providers",
+ default="",
+ )
+
+ POSITION_PROVIDER_EXCLUDES: str = Field(
+ description="The excluded model providers",
+ default="",
+ )
+
+ POSITION_TOOL_PINS: str = Field(
+ description="The heads of tools",
+ default="",
+ )
+
+ POSITION_TOOL_INCLUDES: str = Field(
+ description="The included tools",
+ default="",
+ )
+
+ POSITION_TOOL_EXCLUDES: str = Field(
+ description="The excluded tools",
+ default="",
+ )
+
+ @computed_field
+ def POSITION_PROVIDER_PINS_LIST(self) -> list[str]:
+ return [item.strip() for item in self.POSITION_PROVIDER_PINS.split(",") if item.strip() != ""]
+
+ @computed_field
+ def POSITION_PROVIDER_INCLUDES_SET(self) -> set[str]:
+ return {item.strip() for item in self.POSITION_PROVIDER_INCLUDES.split(",") if item.strip() != ""}
+
+ @computed_field
+ def POSITION_PROVIDER_EXCLUDES_SET(self) -> set[str]:
+ return {item.strip() for item in self.POSITION_PROVIDER_EXCLUDES.split(",") if item.strip() != ""}
+
+ @computed_field
+ def POSITION_TOOL_PINS_LIST(self) -> list[str]:
+ return [item.strip() for item in self.POSITION_TOOL_PINS.split(",") if item.strip() != ""]
+
+ @computed_field
+ def POSITION_TOOL_INCLUDES_SET(self) -> set[str]:
+ return {item.strip() for item in self.POSITION_TOOL_INCLUDES.split(",") if item.strip() != ""}
+
+ @computed_field
+ def POSITION_TOOL_EXCLUDES_SET(self) -> set[str]:
+ return {item.strip() for item in self.POSITION_TOOL_EXCLUDES.split(",") if item.strip() != ""}
+
+
class FeatureConfig(
# place the configs in alphabet order
AppExecutionConfig,
@@ -466,7 +623,7 @@ class FeatureConfig(
UpdateConfig,
WorkflowConfig,
WorkspaceConfig,
-
+ PositionConfig,
# hosted services config
HostedServiceConfig,
CeleryBeatConfig,
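The new PositionConfig group (now mixed into FeatureConfig) parses the comma-separated pin/include/exclude values into a pinned list and include/exclude sets via computed fields, stripping whitespace and dropping empty items. A short sketch of that parsing, instantiating the group directly with illustrative values:

from configs.feature import PositionConfig

pos = PositionConfig(
    POSITION_TOOL_PINS="google, dalle , ",
    POSITION_TOOL_EXCLUDES="wolframalpha",
)
print(pos.POSITION_TOOL_PINS_LIST)         # ['google', 'dalle']
print(pos.POSITION_TOOL_EXCLUDES_SET)      # {'wolframalpha'}
print(pos.POSITION_PROVIDER_INCLUDES_SET)  # set(), since the field defaults to ""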
diff --git a/api/configs/feature/hosted_service/__init__.py b/api/configs/feature/hosted_service/__init__.py
index 88fe188587e7a1..f269d0ab9c89cd 100644
--- a/api/configs/feature/hosted_service/__init__.py
+++ b/api/configs/feature/hosted_service/__init__.py
@@ -10,62 +10,62 @@ class HostedOpenAiConfig(BaseSettings):
"""
HOSTED_OPENAI_API_KEY: Optional[str] = Field(
- description='',
+ description="",
default=None,
)
HOSTED_OPENAI_API_BASE: Optional[str] = Field(
- description='',
+ description="",
default=None,
)
HOSTED_OPENAI_API_ORGANIZATION: Optional[str] = Field(
- description='',
+ description="",
default=None,
)
HOSTED_OPENAI_TRIAL_ENABLED: bool = Field(
- description='',
+ description="",
default=False,
)
HOSTED_OPENAI_TRIAL_MODELS: str = Field(
- description='',
- default='gpt-3.5-turbo,'
- 'gpt-3.5-turbo-1106,'
- 'gpt-3.5-turbo-instruct,'
- 'gpt-3.5-turbo-16k,'
- 'gpt-3.5-turbo-16k-0613,'
- 'gpt-3.5-turbo-0613,'
- 'gpt-3.5-turbo-0125,'
- 'text-davinci-003',
+ description="",
+ default="gpt-3.5-turbo,"
+ "gpt-3.5-turbo-1106,"
+ "gpt-3.5-turbo-instruct,"
+ "gpt-3.5-turbo-16k,"
+ "gpt-3.5-turbo-16k-0613,"
+ "gpt-3.5-turbo-0613,"
+ "gpt-3.5-turbo-0125,"
+ "text-davinci-003",
)
HOSTED_OPENAI_QUOTA_LIMIT: NonNegativeInt = Field(
- description='',
+ description="",
default=200,
)
HOSTED_OPENAI_PAID_ENABLED: bool = Field(
- description='',
+ description="",
default=False,
)
HOSTED_OPENAI_PAID_MODELS: str = Field(
- description='',
- default='gpt-4,'
- 'gpt-4-turbo-preview,'
- 'gpt-4-turbo-2024-04-09,'
- 'gpt-4-1106-preview,'
- 'gpt-4-0125-preview,'
- 'gpt-3.5-turbo,'
- 'gpt-3.5-turbo-16k,'
- 'gpt-3.5-turbo-16k-0613,'
- 'gpt-3.5-turbo-1106,'
- 'gpt-3.5-turbo-0613,'
- 'gpt-3.5-turbo-0125,'
- 'gpt-3.5-turbo-instruct,'
- 'text-davinci-003',
+ description="",
+ default="gpt-4,"
+ "gpt-4-turbo-preview,"
+ "gpt-4-turbo-2024-04-09,"
+ "gpt-4-1106-preview,"
+ "gpt-4-0125-preview,"
+ "gpt-3.5-turbo,"
+ "gpt-3.5-turbo-16k,"
+ "gpt-3.5-turbo-16k-0613,"
+ "gpt-3.5-turbo-1106,"
+ "gpt-3.5-turbo-0613,"
+ "gpt-3.5-turbo-0125,"
+ "gpt-3.5-turbo-instruct,"
+ "text-davinci-003",
)
@@ -75,22 +75,22 @@ class HostedAzureOpenAiConfig(BaseSettings):
"""
HOSTED_AZURE_OPENAI_ENABLED: bool = Field(
- description='',
+ description="",
default=False,
)
HOSTED_AZURE_OPENAI_API_KEY: Optional[str] = Field(
- description='',
+ description="",
default=None,
)
HOSTED_AZURE_OPENAI_API_BASE: Optional[str] = Field(
- description='',
+ description="",
default=None,
)
HOSTED_AZURE_OPENAI_QUOTA_LIMIT: NonNegativeInt = Field(
- description='',
+ description="",
default=200,
)
@@ -101,27 +101,27 @@ class HostedAnthropicConfig(BaseSettings):
"""
HOSTED_ANTHROPIC_API_BASE: Optional[str] = Field(
- description='',
+ description="",
default=None,
)
HOSTED_ANTHROPIC_API_KEY: Optional[str] = Field(
- description='',
+ description="",
default=None,
)
HOSTED_ANTHROPIC_TRIAL_ENABLED: bool = Field(
- description='',
+ description="",
default=False,
)
HOSTED_ANTHROPIC_QUOTA_LIMIT: NonNegativeInt = Field(
- description='',
+ description="",
default=600000,
)
HOSTED_ANTHROPIC_PAID_ENABLED: bool = Field(
- description='',
+ description="",
default=False,
)
@@ -132,7 +132,7 @@ class HostedMinmaxConfig(BaseSettings):
"""
HOSTED_MINIMAX_ENABLED: bool = Field(
- description='',
+ description="",
default=False,
)
@@ -143,7 +143,7 @@ class HostedSparkConfig(BaseSettings):
"""
HOSTED_SPARK_ENABLED: bool = Field(
- description='',
+ description="",
default=False,
)
@@ -154,7 +154,7 @@ class HostedZhipuAIConfig(BaseSettings):
"""
HOSTED_ZHIPUAI_ENABLED: bool = Field(
- description='',
+ description="",
default=False,
)
@@ -165,13 +165,13 @@ class HostedModerationConfig(BaseSettings):
"""
HOSTED_MODERATION_ENABLED: bool = Field(
- description='',
+ description="",
default=False,
)
HOSTED_MODERATION_PROVIDERS: str = Field(
- description='',
- default='',
+ description="",
+ default="",
)
@@ -181,15 +181,15 @@ class HostedFetchAppTemplateConfig(BaseSettings):
"""
HOSTED_FETCH_APP_TEMPLATES_MODE: str = Field(
- description='the mode for fetching app templates,'
- ' default to remote,'
- ' available values: remote, db, builtin',
- default='remote',
+ description="the mode for fetching app templates,"
+ " default to remote,"
+ " available values: remote, db, builtin",
+ default="remote",
)
HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN: str = Field(
- description='the domain for fetching remote app templates',
- default='https://tmpl.dify.ai',
+ description="the domain for fetching remote app templates",
+ default="https://tmpl.dify.ai",
)
@@ -202,7 +202,6 @@ class HostedServiceConfig(
HostedOpenAiConfig,
HostedSparkConfig,
HostedZhipuAIConfig,
-
# moderation
HostedModerationConfig,
):
diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py
index 07688e9aebfdc4..f25979e5d8f775 100644
--- a/api/configs/middleware/__init__.py
+++ b/api/configs/middleware/__init__.py
@@ -13,6 +13,7 @@
from configs.middleware.storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
from configs.middleware.vdb.analyticdb_config import AnalyticdbConfig
from configs.middleware.vdb.chroma_config import ChromaConfig
+from configs.middleware.vdb.elasticsearch_config import ElasticsearchConfig
from configs.middleware.vdb.milvus_config import MilvusConfig
from configs.middleware.vdb.myscale_config import MyScaleConfig
from configs.middleware.vdb.opensearch_config import OpenSearchConfig
@@ -28,108 +29,108 @@
class StorageConfig(BaseSettings):
STORAGE_TYPE: str = Field(
- description='storage type,'
- ' default to `local`,'
- ' available values are `local`, `s3`, `azure-blob`, `aliyun-oss`, `google-storage`.',
- default='local',
+ description="storage type,"
+ " default to `local`,"
+ " available values are `local`, `s3`, `azure-blob`, `aliyun-oss`, `google-storage`.",
+ default="local",
)
STORAGE_LOCAL_PATH: str = Field(
- description='local storage path',
- default='storage',
+ description="local storage path",
+ default="storage",
)
class VectorStoreConfig(BaseSettings):
VECTOR_STORE: Optional[str] = Field(
- description='vector store type',
+ description="vector store type",
default=None,
)
class KeywordStoreConfig(BaseSettings):
KEYWORD_STORE: str = Field(
- description='keyword store type',
- default='jieba',
+ description="keyword store type",
+ default="jieba",
)
class DatabaseConfig:
DB_HOST: str = Field(
- description='db host',
- default='localhost',
+ description="db host",
+ default="localhost",
)
DB_PORT: PositiveInt = Field(
- description='db port',
+ description="db port",
default=5432,
)
DB_USERNAME: str = Field(
- description='db username',
- default='postgres',
+ description="db username",
+ default="postgres",
)
DB_PASSWORD: str = Field(
- description='db password',
- default='',
+ description="db password",
+ default="",
)
DB_DATABASE: str = Field(
- description='db database',
- default='dify',
+ description="db database",
+ default="dify",
)
DB_CHARSET: str = Field(
- description='db charset',
- default='',
+ description="db charset",
+ default="",
)
DB_EXTRAS: str = Field(
- description='db extras options. Example: keepalives_idle=60&keepalives=1',
- default='',
+ description="db extras options. Example: keepalives_idle=60&keepalives=1",
+ default="",
)
SQLALCHEMY_DATABASE_URI_SCHEME: str = Field(
- description='db uri scheme',
- default='postgresql',
+ description="db uri scheme",
+ default="postgresql",
)
@computed_field
@property
def SQLALCHEMY_DATABASE_URI(self) -> str:
db_extras = (
- f"{self.DB_EXTRAS}&client_encoding={self.DB_CHARSET}"
- if self.DB_CHARSET
- else self.DB_EXTRAS
+ f"{self.DB_EXTRAS}&client_encoding={self.DB_CHARSET}" if self.DB_CHARSET else self.DB_EXTRAS
).strip("&")
db_extras = f"?{db_extras}" if db_extras else ""
- return (f"{self.SQLALCHEMY_DATABASE_URI_SCHEME}://"
- f"{quote_plus(self.DB_USERNAME)}:{quote_plus(self.DB_PASSWORD)}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_DATABASE}"
- f"{db_extras}")
+ return (
+ f"{self.SQLALCHEMY_DATABASE_URI_SCHEME}://"
+ f"{quote_plus(self.DB_USERNAME)}:{quote_plus(self.DB_PASSWORD)}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_DATABASE}"
+ f"{db_extras}"
+ )
SQLALCHEMY_POOL_SIZE: NonNegativeInt = Field(
- description='pool size of SqlAlchemy',
+ description="pool size of SqlAlchemy",
default=30,
)
SQLALCHEMY_MAX_OVERFLOW: NonNegativeInt = Field(
- description='max overflows for SqlAlchemy',
+ description="max overflows for SqlAlchemy",
default=10,
)
SQLALCHEMY_POOL_RECYCLE: NonNegativeInt = Field(
- description='SqlAlchemy pool recycle',
+ description="SqlAlchemy pool recycle",
default=3600,
)
SQLALCHEMY_POOL_PRE_PING: bool = Field(
- description='whether to enable pool pre-ping in SqlAlchemy',
+ description="whether to enable pool pre-ping in SqlAlchemy",
default=False,
)
SQLALCHEMY_ECHO: bool | str = Field(
- description='whether to enable SqlAlchemy echo',
+ description="whether to enable SqlAlchemy echo",
default=False,
)
@@ -137,35 +138,38 @@ def SQLALCHEMY_DATABASE_URI(self) -> str:
@property
def SQLALCHEMY_ENGINE_OPTIONS(self) -> dict[str, Any]:
return {
- 'pool_size': self.SQLALCHEMY_POOL_SIZE,
- 'max_overflow': self.SQLALCHEMY_MAX_OVERFLOW,
- 'pool_recycle': self.SQLALCHEMY_POOL_RECYCLE,
- 'pool_pre_ping': self.SQLALCHEMY_POOL_PRE_PING,
- 'connect_args': {'options': '-c timezone=UTC'},
+ "pool_size": self.SQLALCHEMY_POOL_SIZE,
+ "max_overflow": self.SQLALCHEMY_MAX_OVERFLOW,
+ "pool_recycle": self.SQLALCHEMY_POOL_RECYCLE,
+ "pool_pre_ping": self.SQLALCHEMY_POOL_PRE_PING,
+ "connect_args": {"options": "-c timezone=UTC"},
}
class CeleryConfig(DatabaseConfig):
CELERY_BACKEND: str = Field(
- description='Celery backend, available values are `database`, `redis`',
- default='database',
+ description="Celery backend, available values are `database`, `redis`",
+ default="database",
)
CELERY_BROKER_URL: Optional[str] = Field(
- description='CELERY_BROKER_URL',
+ description="CELERY_BROKER_URL",
default=None,
)
@computed_field
@property
def CELERY_RESULT_BACKEND(self) -> str | None:
- return 'db+{}'.format(self.SQLALCHEMY_DATABASE_URI) \
- if self.CELERY_BACKEND == 'database' else self.CELERY_BROKER_URL
+ return (
+ "db+{}".format(self.SQLALCHEMY_DATABASE_URI)
+ if self.CELERY_BACKEND == "database"
+ else self.CELERY_BROKER_URL
+ )
@computed_field
@property
def BROKER_USE_SSL(self) -> bool:
- return self.CELERY_BROKER_URL.startswith('rediss://') if self.CELERY_BROKER_URL else False
+ return self.CELERY_BROKER_URL.startswith("rediss://") if self.CELERY_BROKER_URL else False
class MiddlewareConfig(
@@ -174,7 +178,6 @@ class MiddlewareConfig(
DatabaseConfig,
KeywordStoreConfig,
RedisConfig,
-
# configs of storage and storage providers
StorageConfig,
AliyunOSSStorageConfig,
@@ -183,7 +186,6 @@ class MiddlewareConfig(
TencentCloudCOSStorageConfig,
S3StorageConfig,
OCIStorageConfig,
-
# configs of vdb and vdb providers
VectorStoreConfig,
AnalyticdbConfig,
@@ -199,5 +201,6 @@ class MiddlewareConfig(
TencentVectorDBConfig,
TiDBVectorConfig,
WeaviateConfig,
+ ElasticsearchConfig,
):
pass
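For context on the reformatted SQLALCHEMY_DATABASE_URI property above, here is a standalone sketch of how the parts combine. The class name, the illustrative defaults, and the use of BaseSettings here are not from the diff (the real DatabaseConfig is a plain mixin consumed by MiddlewareConfig); the assembly logic mirrors the hunk:

```python
# Sketch (assumes pydantic v2 + pydantic-settings): how DB_* settings become the
# SQLAlchemy URI. Defaults below are illustrative; real values come from the env.
from urllib.parse import quote_plus

from pydantic import Field, computed_field
from pydantic_settings import BaseSettings


class DatabaseConfigSketch(BaseSettings):
    DB_HOST: str = Field(default="localhost")
    DB_PORT: int = Field(default=5432)
    DB_USERNAME: str = Field(default="postgres")
    DB_PASSWORD: str = Field(default="p@ss/word")  # made-up password with characters needing quoting
    DB_DATABASE: str = Field(default="dify")
    DB_CHARSET: str = Field(default="utf8")
    DB_EXTRAS: str = Field(default="keepalives_idle=60")
    SQLALCHEMY_DATABASE_URI_SCHEME: str = Field(default="postgresql")

    @computed_field
    @property
    def SQLALCHEMY_DATABASE_URI(self) -> str:
        # Charset is appended to the extra options, the query string is attached only
        # when non-empty, and credentials are URL-quoted.
        db_extras = (
            f"{self.DB_EXTRAS}&client_encoding={self.DB_CHARSET}" if self.DB_CHARSET else self.DB_EXTRAS
        ).strip("&")
        db_extras = f"?{db_extras}" if db_extras else ""
        return (
            f"{self.SQLALCHEMY_DATABASE_URI_SCHEME}://"
            f"{quote_plus(self.DB_USERNAME)}:{quote_plus(self.DB_PASSWORD)}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_DATABASE}"
            f"{db_extras}"
        )


# With the defaults above (and no overriding env vars), this prints:
# postgresql://postgres:p%40ss%2Fword@localhost:5432/dify?keepalives_idle=60&client_encoding=utf8
print(DatabaseConfigSketch().SQLALCHEMY_DATABASE_URI)
```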
diff --git a/api/configs/middleware/cache/redis_config.py b/api/configs/middleware/cache/redis_config.py
index 436ba5d4c01f5c..cacdaf6fb6df2a 100644
--- a/api/configs/middleware/cache/redis_config.py
+++ b/api/configs/middleware/cache/redis_config.py
@@ -8,32 +8,33 @@ class RedisConfig(BaseSettings):
"""
Redis configs
"""
+
REDIS_HOST: str = Field(
- description='Redis host',
- default='localhost',
+ description="Redis host",
+ default="localhost",
)
REDIS_PORT: PositiveInt = Field(
- description='Redis port',
+ description="Redis port",
default=6379,
)
REDIS_USERNAME: Optional[str] = Field(
- description='Redis username',
+ description="Redis username",
default=None,
)
REDIS_PASSWORD: Optional[str] = Field(
- description='Redis password',
+ description="Redis password",
default=None,
)
REDIS_DB: NonNegativeInt = Field(
- description='Redis database id, default to 0',
+ description="Redis database id, default to 0",
default=0,
)
REDIS_USE_SSL: bool = Field(
- description='whether to use SSL for Redis connection',
+ description="whether to use SSL for Redis connection",
default=False,
)
diff --git a/api/configs/middleware/storage/aliyun_oss_storage_config.py b/api/configs/middleware/storage/aliyun_oss_storage_config.py
index 19e6cafb1282b1..c1843dc26cee00 100644
--- a/api/configs/middleware/storage/aliyun_oss_storage_config.py
+++ b/api/configs/middleware/storage/aliyun_oss_storage_config.py
@@ -10,31 +10,36 @@ class AliyunOSSStorageConfig(BaseSettings):
"""
ALIYUN_OSS_BUCKET_NAME: Optional[str] = Field(
- description='Aliyun OSS bucket name',
+ description="Aliyun OSS bucket name",
default=None,
)
ALIYUN_OSS_ACCESS_KEY: Optional[str] = Field(
- description='Aliyun OSS access key',
+ description="Aliyun OSS access key",
default=None,
)
ALIYUN_OSS_SECRET_KEY: Optional[str] = Field(
- description='Aliyun OSS secret key',
+ description="Aliyun OSS secret key",
default=None,
)
ALIYUN_OSS_ENDPOINT: Optional[str] = Field(
- description='Aliyun OSS endpoint URL',
+ description="Aliyun OSS endpoint URL",
default=None,
)
ALIYUN_OSS_REGION: Optional[str] = Field(
- description='Aliyun OSS region',
+ description="Aliyun OSS region",
default=None,
)
ALIYUN_OSS_AUTH_VERSION: Optional[str] = Field(
- description='Aliyun OSS authentication version',
+ description="Aliyun OSS authentication version",
+ default=None,
+ )
+
+ ALIYUN_OSS_PATH: Optional[str] = Field(
+ description="Aliyun OSS path",
default=None,
)
diff --git a/api/configs/middleware/storage/amazon_s3_storage_config.py b/api/configs/middleware/storage/amazon_s3_storage_config.py
index 2566fbd5da6b96..bef93261087092 100644
--- a/api/configs/middleware/storage/amazon_s3_storage_config.py
+++ b/api/configs/middleware/storage/amazon_s3_storage_config.py
@@ -10,36 +10,36 @@ class S3StorageConfig(BaseSettings):
"""
S3_ENDPOINT: Optional[str] = Field(
- description='S3 storage endpoint',
+ description="S3 storage endpoint",
default=None,
)
S3_REGION: Optional[str] = Field(
- description='S3 storage region',
+ description="S3 storage region",
default=None,
)
S3_BUCKET_NAME: Optional[str] = Field(
- description='S3 storage bucket name',
+ description="S3 storage bucket name",
default=None,
)
S3_ACCESS_KEY: Optional[str] = Field(
- description='S3 storage access key',
+ description="S3 storage access key",
default=None,
)
S3_SECRET_KEY: Optional[str] = Field(
- description='S3 storage secret key',
+ description="S3 storage secret key",
default=None,
)
S3_ADDRESS_STYLE: str = Field(
- description='S3 storage address style',
- default='auto',
+ description="S3 storage address style",
+ default="auto",
)
S3_USE_AWS_MANAGED_IAM: bool = Field(
- description='whether to use aws managed IAM for S3',
+ description="whether to use aws managed IAM for S3",
default=False,
)
diff --git a/api/configs/middleware/storage/azure_blob_storage_config.py b/api/configs/middleware/storage/azure_blob_storage_config.py
index 26e441c89bd4e4..10944b58eddc61 100644
--- a/api/configs/middleware/storage/azure_blob_storage_config.py
+++ b/api/configs/middleware/storage/azure_blob_storage_config.py
@@ -10,21 +10,21 @@ class AzureBlobStorageConfig(BaseSettings):
"""
AZURE_BLOB_ACCOUNT_NAME: Optional[str] = Field(
- description='Azure Blob account name',
+ description="Azure Blob account name",
default=None,
)
AZURE_BLOB_ACCOUNT_KEY: Optional[str] = Field(
- description='Azure Blob account key',
+ description="Azure Blob account key",
default=None,
)
AZURE_BLOB_CONTAINER_NAME: Optional[str] = Field(
- description='Azure Blob container name',
+ description="Azure Blob container name",
default=None,
)
AZURE_BLOB_ACCOUNT_URL: Optional[str] = Field(
- description='Azure Blob account URL',
+ description="Azure Blob account URL",
default=None,
)
diff --git a/api/configs/middleware/storage/google_cloud_storage_config.py b/api/configs/middleware/storage/google_cloud_storage_config.py
index e1b0e34e0c32fd..10a2d97e8dbcdf 100644
--- a/api/configs/middleware/storage/google_cloud_storage_config.py
+++ b/api/configs/middleware/storage/google_cloud_storage_config.py
@@ -10,11 +10,11 @@ class GoogleCloudStorageConfig(BaseSettings):
"""
GOOGLE_STORAGE_BUCKET_NAME: Optional[str] = Field(
- description='Google Cloud storage bucket name',
+ description="Google Cloud storage bucket name",
default=None,
)
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: Optional[str] = Field(
- description='Google Cloud storage service account json base64',
+ description="Google Cloud storage service account json base64",
default=None,
)
diff --git a/api/configs/middleware/storage/oci_storage_config.py b/api/configs/middleware/storage/oci_storage_config.py
index 6c0c06746954f0..f8993496c952b8 100644
--- a/api/configs/middleware/storage/oci_storage_config.py
+++ b/api/configs/middleware/storage/oci_storage_config.py
@@ -10,27 +10,26 @@ class OCIStorageConfig(BaseSettings):
"""
OCI_ENDPOINT: Optional[str] = Field(
- description='OCI storage endpoint',
+ description="OCI storage endpoint",
default=None,
)
OCI_REGION: Optional[str] = Field(
- description='OCI storage region',
+ description="OCI storage region",
default=None,
)
OCI_BUCKET_NAME: Optional[str] = Field(
- description='OCI storage bucket name',
+ description="OCI storage bucket name",
default=None,
)
OCI_ACCESS_KEY: Optional[str] = Field(
- description='OCI storage access key',
+ description="OCI storage access key",
default=None,
)
OCI_SECRET_KEY: Optional[str] = Field(
- description='OCI storage secret key',
+ description="OCI storage secret key",
default=None,
)
-
diff --git a/api/configs/middleware/storage/tencent_cos_storage_config.py b/api/configs/middleware/storage/tencent_cos_storage_config.py
index 1060c7b93e0bf1..765ac08f3e4047 100644
--- a/api/configs/middleware/storage/tencent_cos_storage_config.py
+++ b/api/configs/middleware/storage/tencent_cos_storage_config.py
@@ -10,26 +10,26 @@ class TencentCloudCOSStorageConfig(BaseSettings):
"""
TENCENT_COS_BUCKET_NAME: Optional[str] = Field(
- description='Tencent Cloud COS bucket name',
+ description="Tencent Cloud COS bucket name",
default=None,
)
TENCENT_COS_REGION: Optional[str] = Field(
- description='Tencent Cloud COS region',
+ description="Tencent Cloud COS region",
default=None,
)
TENCENT_COS_SECRET_ID: Optional[str] = Field(
- description='Tencent Cloud COS secret id',
+ description="Tencent Cloud COS secret id",
default=None,
)
TENCENT_COS_SECRET_KEY: Optional[str] = Field(
- description='Tencent Cloud COS secret key',
+ description="Tencent Cloud COS secret key",
default=None,
)
TENCENT_COS_SCHEME: Optional[str] = Field(
- description='Tencent Cloud COS scheme',
+ description="Tencent Cloud COS scheme",
default=None,
)
diff --git a/api/configs/middleware/vdb/analyticdb_config.py b/api/configs/middleware/vdb/analyticdb_config.py
index db2899265e204f..04f5b0e5bf7697 100644
--- a/api/configs/middleware/vdb/analyticdb_config.py
+++ b/api/configs/middleware/vdb/analyticdb_config.py
@@ -10,35 +10,28 @@ class AnalyticdbConfig(BaseModel):
https://www.alibabacloud.com/help/en/analyticdb-for-postgresql/getting-started/create-an-instance-instances-with-vector-engine-optimization-enabled
"""
- ANALYTICDB_KEY_ID : Optional[str] = Field(
- default=None,
- description="The Access Key ID provided by Alibaba Cloud for authentication."
+ ANALYTICDB_KEY_ID: Optional[str] = Field(
+ default=None, description="The Access Key ID provided by Alibaba Cloud for authentication."
)
- ANALYTICDB_KEY_SECRET : Optional[str] = Field(
- default=None,
- description="The Secret Access Key corresponding to the Access Key ID for secure access."
+ ANALYTICDB_KEY_SECRET: Optional[str] = Field(
+ default=None, description="The Secret Access Key corresponding to the Access Key ID for secure access."
)
- ANALYTICDB_REGION_ID : Optional[str] = Field(
- default=None,
- description="The region where the AnalyticDB instance is deployed (e.g., 'cn-hangzhou')."
+ ANALYTICDB_REGION_ID: Optional[str] = Field(
+ default=None, description="The region where the AnalyticDB instance is deployed (e.g., 'cn-hangzhou')."
)
- ANALYTICDB_INSTANCE_ID : Optional[str] = Field(
+ ANALYTICDB_INSTANCE_ID: Optional[str] = Field(
default=None,
- description="The unique identifier of the AnalyticDB instance you want to connect to (e.g., 'gp-ab123456').."
+ description="The unique identifier of the AnalyticDB instance you want to connect to (e.g., 'gp-ab123456')..",
)
- ANALYTICDB_ACCOUNT : Optional[str] = Field(
- default=None,
- description="The account name used to log in to the AnalyticDB instance."
+ ANALYTICDB_ACCOUNT: Optional[str] = Field(
+ default=None, description="The account name used to log in to the AnalyticDB instance."
)
- ANALYTICDB_PASSWORD : Optional[str] = Field(
- default=None,
- description="The password associated with the AnalyticDB account for authentication."
+ ANALYTICDB_PASSWORD: Optional[str] = Field(
+ default=None, description="The password associated with the AnalyticDB account for authentication."
)
- ANALYTICDB_NAMESPACE : Optional[str] = Field(
- default=None,
- description="The namespace within AnalyticDB for schema isolation."
+ ANALYTICDB_NAMESPACE: Optional[str] = Field(
+ default=None, description="The namespace within AnalyticDB for schema isolation."
)
- ANALYTICDB_NAMESPACE_PASSWORD : Optional[str] = Field(
- default=None,
- description="The password for accessing the specified namespace within the AnalyticDB instance."
+ ANALYTICDB_NAMESPACE_PASSWORD: Optional[str] = Field(
+ default=None, description="The password for accessing the specified namespace within the AnalyticDB instance."
)
diff --git a/api/configs/middleware/vdb/chroma_config.py b/api/configs/middleware/vdb/chroma_config.py
index f365879efb1a19..d386623a569d38 100644
--- a/api/configs/middleware/vdb/chroma_config.py
+++ b/api/configs/middleware/vdb/chroma_config.py
@@ -10,31 +10,31 @@ class ChromaConfig(BaseSettings):
"""
CHROMA_HOST: Optional[str] = Field(
- description='Chroma host',
+ description="Chroma host",
default=None,
)
CHROMA_PORT: PositiveInt = Field(
- description='Chroma port',
+ description="Chroma port",
default=8000,
)
CHROMA_TENANT: Optional[str] = Field(
- description='Chroma database',
+ description="Chroma database",
default=None,
)
CHROMA_DATABASE: Optional[str] = Field(
- description='Chroma database',
+ description="Chroma database",
default=None,
)
CHROMA_AUTH_PROVIDER: Optional[str] = Field(
- description='Chroma authentication provider',
+ description="Chroma authentication provider",
default=None,
)
CHROMA_AUTH_CREDENTIALS: Optional[str] = Field(
- description='Chroma authentication credentials',
+ description="Chroma authentication credentials",
default=None,
)
diff --git a/api/configs/middleware/vdb/elasticsearch_config.py b/api/configs/middleware/vdb/elasticsearch_config.py
new file mode 100644
index 00000000000000..5b6a8fd939c292
--- /dev/null
+++ b/api/configs/middleware/vdb/elasticsearch_config.py
@@ -0,0 +1,30 @@
+from typing import Optional
+
+from pydantic import Field, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class ElasticsearchConfig(BaseSettings):
+ """
+ Elasticsearch configs
+ """
+
+ ELASTICSEARCH_HOST: Optional[str] = Field(
+ description="Elasticsearch host",
+ default="127.0.0.1",
+ )
+
+ ELASTICSEARCH_PORT: PositiveInt = Field(
+ description="Elasticsearch port",
+ default=9200,
+ )
+
+ ELASTICSEARCH_USERNAME: Optional[str] = Field(
+ description="Elasticsearch username",
+ default="elastic",
+ )
+
+ ELASTICSEARCH_PASSWORD: Optional[str] = Field(
+ description="Elasticsearch password",
+ default="elastic",
+ )
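The new ElasticsearchConfig is a plain pydantic-settings class, so its values are read from the process environment (or the .env file the app loads) and become available once the class is mixed into MiddlewareConfig. A small sketch of reading those settings and deriving a connection URL; the env values and the commented-out client call are illustrative and not part of this diff:

```python
# Sketch only: how the new ELASTICSEARCH_* settings might be consumed.
# Defaults are trimmed to plain strings for brevity.
import os

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class ElasticsearchConfigSketch(BaseSettings):
    ELASTICSEARCH_HOST: str = Field(default="127.0.0.1")
    ELASTICSEARCH_PORT: PositiveInt = Field(default=9200)
    ELASTICSEARCH_USERNAME: str = Field(default="elastic")
    ELASTICSEARCH_PASSWORD: str = Field(default="elastic")


os.environ["ELASTICSEARCH_HOST"] = "es.internal.example"  # made-up host
os.environ["ELASTICSEARCH_PORT"] = "9200"

cfg = ElasticsearchConfigSketch()
url = f"http://{cfg.ELASTICSEARCH_HOST}:{cfg.ELASTICSEARCH_PORT}"
print(url, cfg.ELASTICSEARCH_USERNAME)

# Hypothetical client usage (requires the `elasticsearch` package, not shown in this diff):
# from elasticsearch import Elasticsearch
# client = Elasticsearch(url, basic_auth=(cfg.ELASTICSEARCH_USERNAME, cfg.ELASTICSEARCH_PASSWORD))
```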
diff --git a/api/configs/middleware/vdb/milvus_config.py b/api/configs/middleware/vdb/milvus_config.py
index 01502d45901764..85466cd5cc0dab 100644
--- a/api/configs/middleware/vdb/milvus_config.py
+++ b/api/configs/middleware/vdb/milvus_config.py
@@ -10,31 +10,31 @@ class MilvusConfig(BaseSettings):
"""
MILVUS_HOST: Optional[str] = Field(
- description='Milvus host',
+ description="Milvus host",
default=None,
)
MILVUS_PORT: PositiveInt = Field(
- description='Milvus RestFul API port',
+ description="Milvus RestFul API port",
default=9091,
)
MILVUS_USER: Optional[str] = Field(
- description='Milvus user',
+ description="Milvus user",
default=None,
)
MILVUS_PASSWORD: Optional[str] = Field(
- description='Milvus password',
+ description="Milvus password",
default=None,
)
MILVUS_SECURE: bool = Field(
- description='whether to use SSL connection for Milvus',
+ description="whether to use SSL connection for Milvus",
default=False,
)
MILVUS_DATABASE: str = Field(
- description='Milvus database, default to `default`',
- default='default',
+ description="Milvus database, default to `default`",
+ default="default",
)
diff --git a/api/configs/middleware/vdb/myscale_config.py b/api/configs/middleware/vdb/myscale_config.py
index 895cd6f1769d6e..6451d26e1c65da 100644
--- a/api/configs/middleware/vdb/myscale_config.py
+++ b/api/configs/middleware/vdb/myscale_config.py
@@ -1,4 +1,3 @@
-
from pydantic import BaseModel, Field, PositiveInt
@@ -8,31 +7,31 @@ class MyScaleConfig(BaseModel):
"""
MYSCALE_HOST: str = Field(
- description='MyScale host',
- default='localhost',
+ description="MyScale host",
+ default="localhost",
)
MYSCALE_PORT: PositiveInt = Field(
- description='MyScale port',
+ description="MyScale port",
default=8123,
)
MYSCALE_USER: str = Field(
- description='MyScale user',
- default='default',
+ description="MyScale user",
+ default="default",
)
MYSCALE_PASSWORD: str = Field(
- description='MyScale password',
- default='',
+ description="MyScale password",
+ default="",
)
MYSCALE_DATABASE: str = Field(
- description='MyScale database name',
- default='default',
+ description="MyScale database name",
+ default="default",
)
MYSCALE_FTS_PARAMS: str = Field(
- description='MyScale fts index parameters',
- default='',
+ description="MyScale fts index parameters",
+ default="",
)
diff --git a/api/configs/middleware/vdb/opensearch_config.py b/api/configs/middleware/vdb/opensearch_config.py
index 15d6f5b6a97126..5823dc1433d83c 100644
--- a/api/configs/middleware/vdb/opensearch_config.py
+++ b/api/configs/middleware/vdb/opensearch_config.py
@@ -10,26 +10,26 @@ class OpenSearchConfig(BaseSettings):
"""
OPENSEARCH_HOST: Optional[str] = Field(
- description='OpenSearch host',
+ description="OpenSearch host",
default=None,
)
OPENSEARCH_PORT: PositiveInt = Field(
- description='OpenSearch port',
+ description="OpenSearch port",
default=9200,
)
OPENSEARCH_USER: Optional[str] = Field(
- description='OpenSearch user',
+ description="OpenSearch user",
default=None,
)
OPENSEARCH_PASSWORD: Optional[str] = Field(
- description='OpenSearch password',
+ description="OpenSearch password",
default=None,
)
OPENSEARCH_SECURE: bool = Field(
- description='whether to use SSL connection for OpenSearch',
+ description="whether to use SSL connection for OpenSearch",
default=False,
)
diff --git a/api/configs/middleware/vdb/oracle_config.py b/api/configs/middleware/vdb/oracle_config.py
index 888fc19492d67e..62614ae870cc89 100644
--- a/api/configs/middleware/vdb/oracle_config.py
+++ b/api/configs/middleware/vdb/oracle_config.py
@@ -10,26 +10,26 @@ class OracleConfig(BaseSettings):
"""
ORACLE_HOST: Optional[str] = Field(
- description='ORACLE host',
+ description="ORACLE host",
default=None,
)
ORACLE_PORT: Optional[PositiveInt] = Field(
- description='ORACLE port',
+ description="ORACLE port",
default=1521,
)
ORACLE_USER: Optional[str] = Field(
- description='ORACLE user',
+ description="ORACLE user",
default=None,
)
ORACLE_PASSWORD: Optional[str] = Field(
- description='ORACLE password',
+ description="ORACLE password",
default=None,
)
ORACLE_DATABASE: Optional[str] = Field(
- description='ORACLE database',
+ description="ORACLE database",
default=None,
)
diff --git a/api/configs/middleware/vdb/pgvector_config.py b/api/configs/middleware/vdb/pgvector_config.py
index 8a677f60a3a851..39a7c1d8d5e4ad 100644
--- a/api/configs/middleware/vdb/pgvector_config.py
+++ b/api/configs/middleware/vdb/pgvector_config.py
@@ -10,26 +10,26 @@ class PGVectorConfig(BaseSettings):
"""
PGVECTOR_HOST: Optional[str] = Field(
- description='PGVector host',
+ description="PGVector host",
default=None,
)
PGVECTOR_PORT: Optional[PositiveInt] = Field(
- description='PGVector port',
+ description="PGVector port",
default=5433,
)
PGVECTOR_USER: Optional[str] = Field(
- description='PGVector user',
+ description="PGVector user",
default=None,
)
PGVECTOR_PASSWORD: Optional[str] = Field(
- description='PGVector password',
+ description="PGVector password",
default=None,
)
PGVECTOR_DATABASE: Optional[str] = Field(
- description='PGVector database',
+ description="PGVector database",
default=None,
)
diff --git a/api/configs/middleware/vdb/pgvectors_config.py b/api/configs/middleware/vdb/pgvectors_config.py
index 39f52f22ff6c95..c40e5ff92103fd 100644
--- a/api/configs/middleware/vdb/pgvectors_config.py
+++ b/api/configs/middleware/vdb/pgvectors_config.py
@@ -10,26 +10,26 @@ class PGVectoRSConfig(BaseSettings):
"""
PGVECTO_RS_HOST: Optional[str] = Field(
- description='PGVectoRS host',
+ description="PGVectoRS host",
default=None,
)
PGVECTO_RS_PORT: Optional[PositiveInt] = Field(
- description='PGVectoRS port',
+ description="PGVectoRS port",
default=5431,
)
PGVECTO_RS_USER: Optional[str] = Field(
- description='PGVectoRS user',
+ description="PGVectoRS user",
default=None,
)
PGVECTO_RS_PASSWORD: Optional[str] = Field(
- description='PGVectoRS password',
+ description="PGVectoRS password",
default=None,
)
PGVECTO_RS_DATABASE: Optional[str] = Field(
- description='PGVectoRS database',
+ description="PGVectoRS database",
default=None,
)
diff --git a/api/configs/middleware/vdb/qdrant_config.py b/api/configs/middleware/vdb/qdrant_config.py
index c85bf9c7dc6047..27f75491c98767 100644
--- a/api/configs/middleware/vdb/qdrant_config.py
+++ b/api/configs/middleware/vdb/qdrant_config.py
@@ -10,26 +10,26 @@ class QdrantConfig(BaseSettings):
"""
QDRANT_URL: Optional[str] = Field(
- description='Qdrant url',
+ description="Qdrant url",
default=None,
)
QDRANT_API_KEY: Optional[str] = Field(
- description='Qdrant api key',
+ description="Qdrant api key",
default=None,
)
QDRANT_CLIENT_TIMEOUT: NonNegativeInt = Field(
- description='Qdrant client timeout in seconds',
+ description="Qdrant client timeout in seconds",
default=20,
)
QDRANT_GRPC_ENABLED: bool = Field(
- description='whether enable grpc support for Qdrant connection',
+ description="whether enable grpc support for Qdrant connection",
default=False,
)
QDRANT_GRPC_PORT: PositiveInt = Field(
- description='Qdrant grpc port',
+ description="Qdrant grpc port",
default=6334,
)
diff --git a/api/configs/middleware/vdb/relyt_config.py b/api/configs/middleware/vdb/relyt_config.py
index be93185f3ccab1..66b9ecc03f6aa6 100644
--- a/api/configs/middleware/vdb/relyt_config.py
+++ b/api/configs/middleware/vdb/relyt_config.py
@@ -10,26 +10,26 @@ class RelytConfig(BaseSettings):
"""
RELYT_HOST: Optional[str] = Field(
- description='Relyt host',
+ description="Relyt host",
default=None,
)
RELYT_PORT: PositiveInt = Field(
- description='Relyt port',
+ description="Relyt port",
default=9200,
)
RELYT_USER: Optional[str] = Field(
- description='Relyt user',
+ description="Relyt user",
default=None,
)
RELYT_PASSWORD: Optional[str] = Field(
- description='Relyt password',
+ description="Relyt password",
default=None,
)
RELYT_DATABASE: Optional[str] = Field(
- description='Relyt database',
- default='default',
+ description="Relyt database",
+ default="default",
)
diff --git a/api/configs/middleware/vdb/tencent_vector_config.py b/api/configs/middleware/vdb/tencent_vector_config.py
index 531ec840686eea..46b4cb6a24ff88 100644
--- a/api/configs/middleware/vdb/tencent_vector_config.py
+++ b/api/configs/middleware/vdb/tencent_vector_config.py
@@ -10,41 +10,41 @@ class TencentVectorDBConfig(BaseSettings):
"""
TENCENT_VECTOR_DB_URL: Optional[str] = Field(
- description='Tencent Vector URL',
+ description="Tencent Vector URL",
default=None,
)
TENCENT_VECTOR_DB_API_KEY: Optional[str] = Field(
- description='Tencent Vector API key',
+ description="Tencent Vector API key",
default=None,
)
TENCENT_VECTOR_DB_TIMEOUT: PositiveInt = Field(
- description='Tencent Vector timeout in seconds',
+ description="Tencent Vector timeout in seconds",
default=30,
)
TENCENT_VECTOR_DB_USERNAME: Optional[str] = Field(
- description='Tencent Vector username',
+ description="Tencent Vector username",
default=None,
)
TENCENT_VECTOR_DB_PASSWORD: Optional[str] = Field(
- description='Tencent Vector password',
+ description="Tencent Vector password",
default=None,
)
TENCENT_VECTOR_DB_SHARD: PositiveInt = Field(
- description='Tencent Vector sharding number',
+ description="Tencent Vector sharding number",
default=1,
)
TENCENT_VECTOR_DB_REPLICAS: NonNegativeInt = Field(
- description='Tencent Vector replicas',
+ description="Tencent Vector replicas",
default=2,
)
TENCENT_VECTOR_DB_DATABASE: Optional[str] = Field(
- description='Tencent Vector Database',
+ description="Tencent Vector Database",
default=None,
)
diff --git a/api/configs/middleware/vdb/tidb_vector_config.py b/api/configs/middleware/vdb/tidb_vector_config.py
index 8d459691a895bd..dbcb276c01a1f1 100644
--- a/api/configs/middleware/vdb/tidb_vector_config.py
+++ b/api/configs/middleware/vdb/tidb_vector_config.py
@@ -10,26 +10,26 @@ class TiDBVectorConfig(BaseSettings):
"""
TIDB_VECTOR_HOST: Optional[str] = Field(
- description='TiDB Vector host',
+ description="TiDB Vector host",
default=None,
)
TIDB_VECTOR_PORT: Optional[PositiveInt] = Field(
- description='TiDB Vector port',
+ description="TiDB Vector port",
default=4000,
)
TIDB_VECTOR_USER: Optional[str] = Field(
- description='TiDB Vector user',
+ description="TiDB Vector user",
default=None,
)
TIDB_VECTOR_PASSWORD: Optional[str] = Field(
- description='TiDB Vector password',
+ description="TiDB Vector password",
default=None,
)
TIDB_VECTOR_DATABASE: Optional[str] = Field(
- description='TiDB Vector database',
+ description="TiDB Vector database",
default=None,
)
diff --git a/api/configs/middleware/vdb/weaviate_config.py b/api/configs/middleware/vdb/weaviate_config.py
index b985ecea121f9e..63d1022f6a4516 100644
--- a/api/configs/middleware/vdb/weaviate_config.py
+++ b/api/configs/middleware/vdb/weaviate_config.py
@@ -10,21 +10,21 @@ class WeaviateConfig(BaseSettings):
"""
WEAVIATE_ENDPOINT: Optional[str] = Field(
- description='Weaviate endpoint URL',
+ description="Weaviate endpoint URL",
default=None,
)
WEAVIATE_API_KEY: Optional[str] = Field(
- description='Weaviate API key',
+ description="Weaviate API key",
default=None,
)
WEAVIATE_GRPC_ENABLED: bool = Field(
- description='whether to enable gRPC for Weaviate connection',
+ description="whether to enable gRPC for Weaviate connection",
default=True,
)
WEAVIATE_BATCH_SIZE: PositiveInt = Field(
- description='Weaviate batch size',
+ description="Weaviate batch size",
default=100,
)
diff --git a/api/configs/packaging/__init__.py b/api/configs/packaging/__init__.py
index 13c55ca4251118..dd096716120c69 100644
--- a/api/configs/packaging/__init__.py
+++ b/api/configs/packaging/__init__.py
@@ -8,11 +8,11 @@ class PackagingInfo(BaseSettings):
"""
CURRENT_VERSION: str = Field(
- description='Dify version',
- default='0.6.15',
+ description="Dify version",
+ default="0.7.2",
)
COMMIT_SHA: str = Field(
description="SHA-1 checksum of the git commit used to build the app",
- default='',
+ default="",
)
diff --git a/api/constants/__init__.py b/api/constants/__init__.py
index 08a27869948c95..e22c3268ef428b 100644
--- a/api/constants/__init__.py
+++ b/api/constants/__init__.py
@@ -1,2 +1 @@
-# TODO: Update all string in code to use this constant
-HIDDEN_VALUE = '[__HIDDEN__]'
\ No newline at end of file
+HIDDEN_VALUE = "[__HIDDEN__]"
diff --git a/api/constants/languages.py b/api/constants/languages.py
index 023d2f18a6f08a..524dc61b5790a4 100644
--- a/api/constants/languages.py
+++ b/api/constants/languages.py
@@ -1,21 +1,22 @@
language_timezone_mapping = {
- 'en-US': 'America/New_York',
- 'zh-Hans': 'Asia/Shanghai',
- 'zh-Hant': 'Asia/Taipei',
- 'pt-BR': 'America/Sao_Paulo',
- 'es-ES': 'Europe/Madrid',
- 'fr-FR': 'Europe/Paris',
- 'de-DE': 'Europe/Berlin',
- 'ja-JP': 'Asia/Tokyo',
- 'ko-KR': 'Asia/Seoul',
- 'ru-RU': 'Europe/Moscow',
- 'it-IT': 'Europe/Rome',
- 'uk-UA': 'Europe/Kyiv',
- 'vi-VN': 'Asia/Ho_Chi_Minh',
- 'ro-RO': 'Europe/Bucharest',
- 'pl-PL': 'Europe/Warsaw',
- 'hi-IN': 'Asia/Kolkata',
- 'tr-TR': 'Europe/Istanbul',
+ "en-US": "America/New_York",
+ "zh-Hans": "Asia/Shanghai",
+ "zh-Hant": "Asia/Taipei",
+ "pt-BR": "America/Sao_Paulo",
+ "es-ES": "Europe/Madrid",
+ "fr-FR": "Europe/Paris",
+ "de-DE": "Europe/Berlin",
+ "ja-JP": "Asia/Tokyo",
+ "ko-KR": "Asia/Seoul",
+ "ru-RU": "Europe/Moscow",
+ "it-IT": "Europe/Rome",
+ "uk-UA": "Europe/Kyiv",
+ "vi-VN": "Asia/Ho_Chi_Minh",
+ "ro-RO": "Europe/Bucharest",
+ "pl-PL": "Europe/Warsaw",
+ "hi-IN": "Asia/Kolkata",
+ "tr-TR": "Europe/Istanbul",
+ "fa-IR": "Asia/Tehran",
}
languages = list(language_timezone_mapping.keys())
@@ -25,6 +26,5 @@ def supported_language(lang):
if lang in languages:
return lang
- error = ('{lang} is not a valid language.'
- .format(lang=lang))
+ error = "{lang} is not a valid language.".format(lang=lang)
raise ValueError(error)
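As a quick illustration of the reformatted validator's behavior (standalone copy with a trimmed mapping; not part of the diff):

```python
# Known language codes pass through unchanged; unknown codes raise ValueError.
language_timezone_mapping = {"en-US": "America/New_York", "fa-IR": "Asia/Tehran"}  # trimmed
languages = list(language_timezone_mapping.keys())


def supported_language(lang):
    if lang in languages:
        return lang
    error = "{lang} is not a valid language.".format(lang=lang)
    raise ValueError(error)


print(supported_language("fa-IR"))  # fa-IR
try:
    supported_language("xx-XX")
except ValueError as exc:
    print(exc)  # xx-XX is not a valid language.
```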
diff --git a/api/constants/model_template.py b/api/constants/model_template.py
index cc5a37025479fd..7e1a196356c4e2 100644
--- a/api/constants/model_template.py
+++ b/api/constants/model_template.py
@@ -5,82 +5,79 @@
default_app_templates = {
# workflow default mode
AppMode.WORKFLOW: {
- 'app': {
- 'mode': AppMode.WORKFLOW.value,
- 'enable_site': True,
- 'enable_api': True
+ "app": {
+ "mode": AppMode.WORKFLOW.value,
+ "enable_site": True,
+ "enable_api": True,
}
},
-
# completion default mode
AppMode.COMPLETION: {
- 'app': {
- 'mode': AppMode.COMPLETION.value,
- 'enable_site': True,
- 'enable_api': True
+ "app": {
+ "mode": AppMode.COMPLETION.value,
+ "enable_site": True,
+ "enable_api": True,
},
- 'model_config': {
- 'model': {
+ "model_config": {
+ "model": {
"provider": "openai",
"name": "gpt-4o",
"mode": "chat",
- "completion_params": {}
+ "completion_params": {},
},
- 'user_input_form': json.dumps([
- {
- "paragraph": {
- "label": "Query",
- "variable": "query",
- "required": True,
- "default": ""
- }
- }
- ]),
- 'pre_prompt': '{{query}}'
+ "user_input_form": json.dumps(
+ [
+ {
+ "paragraph": {
+ "label": "Query",
+ "variable": "query",
+ "required": True,
+ "default": "",
+ },
+ },
+ ]
+ ),
+ "pre_prompt": "{{query}}",
},
-
},
-
# chat default mode
AppMode.CHAT: {
- 'app': {
- 'mode': AppMode.CHAT.value,
- 'enable_site': True,
- 'enable_api': True
+ "app": {
+ "mode": AppMode.CHAT.value,
+ "enable_site": True,
+ "enable_api": True,
},
- 'model_config': {
- 'model': {
+ "model_config": {
+ "model": {
"provider": "openai",
"name": "gpt-4o",
"mode": "chat",
- "completion_params": {}
- }
- }
+ "completion_params": {},
+ },
+ },
},
-
# advanced-chat default mode
AppMode.ADVANCED_CHAT: {
- 'app': {
- 'mode': AppMode.ADVANCED_CHAT.value,
- 'enable_site': True,
- 'enable_api': True
- }
+ "app": {
+ "mode": AppMode.ADVANCED_CHAT.value,
+ "enable_site": True,
+ "enable_api": True,
+ },
},
-
# agent-chat default mode
AppMode.AGENT_CHAT: {
- 'app': {
- 'mode': AppMode.AGENT_CHAT.value,
- 'enable_site': True,
- 'enable_api': True
+ "app": {
+ "mode": AppMode.AGENT_CHAT.value,
+ "enable_site": True,
+ "enable_api": True,
},
- 'model_config': {
- 'model': {
+ "model_config": {
+ "model": {
"provider": "openai",
"name": "gpt-4o",
"mode": "chat",
- "completion_params": {}
- }
- }
- }
+ "completion_params": {},
+ },
+ },
+ },
}
diff --git a/api/contexts/__init__.py b/api/contexts/__init__.py
index 306fac3a931298..623a1a28eb731e 100644
--- a/api/contexts/__init__.py
+++ b/api/contexts/__init__.py
@@ -1,3 +1,7 @@
from contextvars import ContextVar
-tenant_id: ContextVar[str] = ContextVar('tenant_id')
\ No newline at end of file
+from core.workflow.entities.variable_pool import VariablePool
+
+tenant_id: ContextVar[str] = ContextVar("tenant_id")
+
+workflow_variable_pool: ContextVar[VariablePool] = ContextVar("workflow_variable_pool")
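contexts/__init__.py now exposes two module-level ContextVars. A minimal sketch of the usual set/get/reset pattern for such request-scoped variables; the handler function below is illustrative and not part of the diff:

```python
# Set a ContextVar at the start of a unit of work, read it anywhere below in the
# same context, and reset it when done so values cannot leak between requests/tasks.
from contextvars import ContextVar

tenant_id: ContextVar[str] = ContextVar("tenant_id")


def do_work() -> None:
    # Any code running inside handle_request() sees the tenant set there.
    print("working for tenant:", tenant_id.get())


def handle_request(current_tenant: str) -> None:
    token = tenant_id.set(current_tenant)
    try:
        do_work()
    finally:
        tenant_id.reset(token)


handle_request("tenant-123")
```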
diff --git a/api/controllers/__init__.py b/api/controllers/__init__.py
index b28b04f643122b..8b137891791fe9 100644
--- a/api/controllers/__init__.py
+++ b/api/controllers/__init__.py
@@ -1,3 +1 @@
-
-
diff --git a/api/controllers/console/__init__.py b/api/controllers/console/__init__.py
index bef40bea7eb32e..eb7c1464d39722 100644
--- a/api/controllers/console/__init__.py
+++ b/api/controllers/console/__init__.py
@@ -2,7 +2,7 @@
from libs.external_api import ExternalApi
-bp = Blueprint('console', __name__, url_prefix='/console/api')
+bp = Blueprint("console", __name__, url_prefix="/console/api")
api = ExternalApi(bp)
# Import other controllers
@@ -17,6 +17,7 @@
audio,
completion,
conversation,
+ conversation_variables,
generator,
message,
model_config,
diff --git a/api/controllers/console/admin.py b/api/controllers/console/admin.py
index 028be5de548b7d..a4ceec26620dc9 100644
--- a/api/controllers/console/admin.py
+++ b/api/controllers/console/admin.py
@@ -15,24 +15,24 @@
def admin_required(view):
@wraps(view)
def decorated(*args, **kwargs):
- if not os.getenv('ADMIN_API_KEY'):
- raise Unauthorized('API key is invalid.')
+ if not os.getenv("ADMIN_API_KEY"):
+ raise Unauthorized("API key is invalid.")
- auth_header = request.headers.get('Authorization')
+ auth_header = request.headers.get("Authorization")
if auth_header is None:
- raise Unauthorized('Authorization header is missing.')
+ raise Unauthorized("Authorization header is missing.")
- if ' ' not in auth_header:
- raise Unauthorized('Invalid Authorization header format. Expected \'Bearer \' format.')
+ if " " not in auth_header:
+ raise Unauthorized("Invalid Authorization header format. Expected 'Bearer ' format.")
auth_scheme, auth_token = auth_header.split(None, 1)
auth_scheme = auth_scheme.lower()
- if auth_scheme != 'bearer':
- raise Unauthorized('Invalid Authorization header format. Expected \'Bearer \' format.')
+ if auth_scheme != "bearer":
+ raise Unauthorized("Invalid Authorization header format. Expected 'Bearer ' format.")
- if os.getenv('ADMIN_API_KEY') != auth_token:
- raise Unauthorized('API key is invalid.')
+ if os.getenv("ADMIN_API_KEY") != auth_token:
+ raise Unauthorized("API key is invalid.")
return view(*args, **kwargs)
@@ -44,37 +44,41 @@ class InsertExploreAppListApi(Resource):
@admin_required
def post(self):
parser = reqparse.RequestParser()
- parser.add_argument('app_id', type=str, required=True, nullable=False, location='json')
- parser.add_argument('desc', type=str, location='json')
- parser.add_argument('copyright', type=str, location='json')
- parser.add_argument('privacy_policy', type=str, location='json')
- parser.add_argument('custom_disclaimer', type=str, location='json')
- parser.add_argument('language', type=supported_language, required=True, nullable=False, location='json')
- parser.add_argument('category', type=str, required=True, nullable=False, location='json')
- parser.add_argument('position', type=int, required=True, nullable=False, location='json')
+ parser.add_argument("app_id", type=str, required=True, nullable=False, location="json")
+ parser.add_argument("desc", type=str, location="json")
+ parser.add_argument("copyright", type=str, location="json")
+ parser.add_argument("privacy_policy", type=str, location="json")
+ parser.add_argument("custom_disclaimer", type=str, location="json")
+ parser.add_argument("language", type=supported_language, required=True, nullable=False, location="json")
+ parser.add_argument("category", type=str, required=True, nullable=False, location="json")
+ parser.add_argument("position", type=int, required=True, nullable=False, location="json")
args = parser.parse_args()
- app = App.query.filter(App.id == args['app_id']).first()
+ app = App.query.filter(App.id == args["app_id"]).first()
if not app:
raise NotFound(f'App \'{args["app_id"]}\' is not found')
site = app.site
if not site:
- desc = args['desc'] if args['desc'] else ''
- copy_right = args['copyright'] if args['copyright'] else ''
- privacy_policy = args['privacy_policy'] if args['privacy_policy'] else ''
- custom_disclaimer = args['custom_disclaimer'] if args['custom_disclaimer'] else ''
+ desc = args["desc"] if args["desc"] else ""
+ copy_right = args["copyright"] if args["copyright"] else ""
+ privacy_policy = args["privacy_policy"] if args["privacy_policy"] else ""
+ custom_disclaimer = args["custom_disclaimer"] if args["custom_disclaimer"] else ""
else:
- desc = site.description if site.description else \
- args['desc'] if args['desc'] else ''
- copy_right = site.copyright if site.copyright else \
- args['copyright'] if args['copyright'] else ''
- privacy_policy = site.privacy_policy if site.privacy_policy else \
- args['privacy_policy'] if args['privacy_policy'] else ''
- custom_disclaimer = site.custom_disclaimer if site.custom_disclaimer else \
- args['custom_disclaimer'] if args['custom_disclaimer'] else ''
+ desc = site.description if site.description else args["desc"] if args["desc"] else ""
+ copy_right = site.copyright if site.copyright else args["copyright"] if args["copyright"] else ""
+ privacy_policy = (
+ site.privacy_policy if site.privacy_policy else args["privacy_policy"] if args["privacy_policy"] else ""
+ )
+ custom_disclaimer = (
+ site.custom_disclaimer
+ if site.custom_disclaimer
+ else args["custom_disclaimer"]
+ if args["custom_disclaimer"]
+ else ""
+ )
- recommended_app = RecommendedApp.query.filter(RecommendedApp.app_id == args['app_id']).first()
+ recommended_app = RecommendedApp.query.filter(RecommendedApp.app_id == args["app_id"]).first()
if not recommended_app:
recommended_app = RecommendedApp(
@@ -83,9 +87,9 @@ def post(self):
copyright=copy_right,
privacy_policy=privacy_policy,
custom_disclaimer=custom_disclaimer,
- language=args['language'],
- category=args['category'],
- position=args['position']
+ language=args["language"],
+ category=args["category"],
+ position=args["position"],
)
db.session.add(recommended_app)
@@ -93,21 +97,21 @@ def post(self):
app.is_public = True
db.session.commit()
- return {'result': 'success'}, 201
+ return {"result": "success"}, 201
else:
recommended_app.description = desc
recommended_app.copyright = copy_right
recommended_app.privacy_policy = privacy_policy
recommended_app.custom_disclaimer = custom_disclaimer
- recommended_app.language = args['language']
- recommended_app.category = args['category']
- recommended_app.position = args['position']
+ recommended_app.language = args["language"]
+ recommended_app.category = args["category"]
+ recommended_app.position = args["position"]
app.is_public = True
db.session.commit()
- return {'result': 'success'}, 200
+ return {"result": "success"}, 200
class InsertExploreAppApi(Resource):
@@ -116,15 +120,14 @@ class InsertExploreAppApi(Resource):
def delete(self, app_id):
recommended_app = RecommendedApp.query.filter(RecommendedApp.app_id == str(app_id)).first()
if not recommended_app:
- return {'result': 'success'}, 204
+ return {"result": "success"}, 204
app = App.query.filter(App.id == recommended_app.app_id).first()
if app:
app.is_public = False
installed_apps = InstalledApp.query.filter(
- InstalledApp.app_id == recommended_app.app_id,
- InstalledApp.tenant_id != InstalledApp.app_owner_tenant_id
+ InstalledApp.app_id == recommended_app.app_id, InstalledApp.tenant_id != InstalledApp.app_owner_tenant_id
).all()
for installed_app in installed_apps:
@@ -133,8 +136,8 @@ def delete(self, app_id):
db.session.delete(recommended_app)
db.session.commit()
- return {'result': 'success'}, 204
+ return {"result": "success"}, 204
-api.add_resource(InsertExploreAppListApi, '/admin/insert-explore-apps')
-api.add_resource(InsertExploreAppApi, '/admin/insert-explore-apps/<uuid:app_id>')
+api.add_resource(InsertExploreAppListApi, "/admin/insert-explore-apps")
+api.add_resource(InsertExploreAppApi, "/admin/insert-explore-apps/<uuid:app_id>")
diff --git a/api/controllers/console/apikey.py b/api/controllers/console/apikey.py
index 324b8311752898..3f5e1adca23791 100644
--- a/api/controllers/console/apikey.py
+++ b/api/controllers/console/apikey.py
@@ -14,26 +14,21 @@
from .wraps import account_initialization_required
api_key_fields = {
- 'id': fields.String,
- 'type': fields.String,
- 'token': fields.String,
- 'last_used_at': TimestampField,
- 'created_at': TimestampField
+ "id": fields.String,
+ "type": fields.String,
+ "token": fields.String,
+ "last_used_at": TimestampField,
+ "created_at": TimestampField,
}
-api_key_list = {
- 'data': fields.List(fields.Nested(api_key_fields), attribute="items")
-}
+api_key_list = {"data": fields.List(fields.Nested(api_key_fields), attribute="items")}
def _get_resource(resource_id, tenant_id, resource_model):
- resource = resource_model.query.filter_by(
- id=resource_id, tenant_id=tenant_id
- ).first()
+ resource = resource_model.query.filter_by(id=resource_id, tenant_id=tenant_id).first()
if resource is None:
- flask_restful.abort(
- 404, message=f"{resource_model.__name__} not found.")
+ flask_restful.abort(404, message=f"{resource_model.__name__} not found.")
return resource
@@ -50,30 +45,32 @@ class BaseApiKeyListResource(Resource):
@marshal_with(api_key_list)
def get(self, resource_id):
resource_id = str(resource_id)
- _get_resource(resource_id, current_user.current_tenant_id,
- self.resource_model)
- keys = db.session.query(ApiToken). \
- filter(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id). \
- all()
+ _get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
+ keys = (
+ db.session.query(ApiToken)
+ .filter(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id)
+ .all()
+ )
return {"items": keys}
@marshal_with(api_key_fields)
def post(self, resource_id):
resource_id = str(resource_id)
- _get_resource(resource_id, current_user.current_tenant_id,
- self.resource_model)
+ _get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
if not current_user.is_admin_or_owner:
raise Forbidden()
- current_key_count = db.session.query(ApiToken). \
- filter(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id). \
- count()
+ current_key_count = (
+ db.session.query(ApiToken)
+ .filter(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id)
+ .count()
+ )
if current_key_count >= self.max_keys:
flask_restful.abort(
400,
message=f"Cannot create more than {self.max_keys} API keys for this resource type.",
- code='max_keys_exceeded'
+ code="max_keys_exceeded",
)
key = ApiToken.generate_api_key(self.token_prefix, 24)
@@ -97,79 +94,78 @@ class BaseApiKeyResource(Resource):
def delete(self, resource_id, api_key_id):
resource_id = str(resource_id)
api_key_id = str(api_key_id)
- _get_resource(resource_id, current_user.current_tenant_id,
- self.resource_model)
+ _get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
- key = db.session.query(ApiToken). \
- filter(getattr(ApiToken, self.resource_id_field) == resource_id, ApiToken.type == self.resource_type, ApiToken.id == api_key_id). \
- first()
+ key = (
+ db.session.query(ApiToken)
+ .filter(
+ getattr(ApiToken, self.resource_id_field) == resource_id,
+ ApiToken.type == self.resource_type,
+ ApiToken.id == api_key_id,
+ )
+ .first()
+ )
if key is None:
- flask_restful.abort(404, message='API key not found')
+ flask_restful.abort(404, message="API key not found")
db.session.query(ApiToken).filter(ApiToken.id == api_key_id).delete()
db.session.commit()
- return {'result': 'success'}, 204
+ return {"result": "success"}, 204
class AppApiKeyListResource(BaseApiKeyListResource):
-
def after_request(self, resp):
- resp.headers['Access-Control-Allow-Origin'] = '*'
- resp.headers['Access-Control-Allow-Credentials'] = 'true'
+ resp.headers["Access-Control-Allow-Origin"] = "*"
+ resp.headers["Access-Control-Allow-Credentials"] = "true"
return resp
- resource_type = 'app'
+ resource_type = "app"
resource_model = App
- resource_id_field = 'app_id'
- token_prefix = 'app-'
+ resource_id_field = "app_id"
+ token_prefix = "app-"
class AppApiKeyResource(BaseApiKeyResource):
-
def after_request(self, resp):
- resp.headers['Access-Control-Allow-Origin'] = '*'
- resp.headers['Access-Control-Allow-Credentials'] = 'true'
+ resp.headers["Access-Control-Allow-Origin"] = "*"
+ resp.headers["Access-Control-Allow-Credentials"] = "true"
return resp
- resource_type = 'app'
+ resource_type = "app"
resource_model = App
- resource_id_field = 'app_id'
+ resource_id_field = "app_id"
class DatasetApiKeyListResource(BaseApiKeyListResource):
-
def after_request(self, resp):
- resp.headers['Access-Control-Allow-Origin'] = '*'
- resp.headers['Access-Control-Allow-Credentials'] = 'true'
+ resp.headers["Access-Control-Allow-Origin"] = "*"
+ resp.headers["Access-Control-Allow-Credentials"] = "true"
return resp
- resource_type = 'dataset'
+ resource_type = "dataset"
resource_model = Dataset
- resource_id_field = 'dataset_id'
- token_prefix = 'ds-'
+ resource_id_field = "dataset_id"
+ token_prefix = "ds-"
class DatasetApiKeyResource(BaseApiKeyResource):
-
def after_request(self, resp):
- resp.headers['Access-Control-Allow-Origin'] = '*'
- resp.headers['Access-Control-Allow-Credentials'] = 'true'
+ resp.headers["Access-Control-Allow-Origin"] = "*"
+ resp.headers["Access-Control-Allow-Credentials"] = "true"
return resp
- resource_type = 'dataset'
+
+ resource_type = "dataset"
resource_model = Dataset
- resource_id_field = 'dataset_id'
+ resource_id_field = "dataset_id"
-api.add_resource(AppApiKeyListResource, '/apps/<uuid:resource_id>/api-keys')
-api.add_resource(AppApiKeyResource,
-                 '/apps/<uuid:resource_id>/api-keys/<uuid:api_key_id>')
-api.add_resource(DatasetApiKeyListResource,
-                 '/datasets/<uuid:resource_id>/api-keys')
-api.add_resource(DatasetApiKeyResource,
-                 '/datasets/<uuid:resource_id>/api-keys/<uuid:api_key_id>')
+api.add_resource(AppApiKeyListResource, "/apps/<uuid:resource_id>/api-keys")
+api.add_resource(AppApiKeyResource, "/apps/<uuid:resource_id>/api-keys/<uuid:api_key_id>")
+api.add_resource(DatasetApiKeyListResource, "/datasets/<uuid:resource_id>/api-keys")
+api.add_resource(DatasetApiKeyResource, "/datasets/<uuid:resource_id>/api-keys/<uuid:api_key_id>")
diff --git a/api/controllers/console/app/advanced_prompt_template.py b/api/controllers/console/app/advanced_prompt_template.py
index fa2b3807e82778..e7346bdf1dd91b 100644
--- a/api/controllers/console/app/advanced_prompt_template.py
+++ b/api/controllers/console/app/advanced_prompt_template.py
@@ -8,19 +8,18 @@
class AdvancedPromptTemplateList(Resource):
-
@setup_required
@login_required
@account_initialization_required
def get(self):
-
parser = reqparse.RequestParser()
- parser.add_argument('app_mode', type=str, required=True, location='args')
- parser.add_argument('model_mode', type=str, required=True, location='args')
- parser.add_argument('has_context', type=str, required=False, default='true', location='args')
- parser.add_argument('model_name', type=str, required=True, location='args')
+ parser.add_argument("app_mode", type=str, required=True, location="args")
+ parser.add_argument("model_mode", type=str, required=True, location="args")
+ parser.add_argument("has_context", type=str, required=False, default="true", location="args")
+ parser.add_argument("model_name", type=str, required=True, location="args")
args = parser.parse_args()
return AdvancedPromptTemplateService.get_prompt(args)
-api.add_resource(AdvancedPromptTemplateList, '/app/prompt-templates')
\ No newline at end of file
+
+api.add_resource(AdvancedPromptTemplateList, "/app/prompt-templates")
diff --git a/api/controllers/console/app/agent.py b/api/controllers/console/app/agent.py
index aee367276c0777..51899da7052111 100644
--- a/api/controllers/console/app/agent.py
+++ b/api/controllers/console/app/agent.py
@@ -18,15 +18,12 @@ class AgentLogApi(Resource):
def get(self, app_model):
"""Get agent logs"""
parser = reqparse.RequestParser()
- parser.add_argument('message_id', type=uuid_value, required=True, location='args')
- parser.add_argument('conversation_id', type=uuid_value, required=True, location='args')
+ parser.add_argument("message_id", type=uuid_value, required=True, location="args")
+ parser.add_argument("conversation_id", type=uuid_value, required=True, location="args")
args = parser.parse_args()
- return AgentService.get_agent_logs(
- app_model,
- args['conversation_id'],
- args['message_id']
- )
-
-api.add_resource(AgentLogApi, '/apps/<uuid:app_id>/agent/logs')
\ No newline at end of file
+ return AgentService.get_agent_logs(app_model, args["conversation_id"], args["message_id"])
+
+
+api.add_resource(AgentLogApi, "/apps/<uuid:app_id>/agent/logs")
diff --git a/api/controllers/console/app/annotation.py b/api/controllers/console/app/annotation.py
index 1ac8e60dcd2613..1ea1c82679defe 100644
--- a/api/controllers/console/app/annotation.py
+++ b/api/controllers/console/app/annotation.py
@@ -21,24 +21,23 @@ class AnnotationReplyActionApi(Resource):
@setup_required
@login_required
@account_initialization_required
- @cloud_edition_billing_resource_check('annotation')
+ @cloud_edition_billing_resource_check("annotation")
def post(self, app_id, action):
- # The role of the current user in the ta table must be admin or owner
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
parser = reqparse.RequestParser()
- parser.add_argument('score_threshold', required=True, type=float, location='json')
- parser.add_argument('embedding_provider_name', required=True, type=str, location='json')
- parser.add_argument('embedding_model_name', required=True, type=str, location='json')
+ parser.add_argument("score_threshold", required=True, type=float, location="json")
+ parser.add_argument("embedding_provider_name", required=True, type=str, location="json")
+ parser.add_argument("embedding_model_name", required=True, type=str, location="json")
args = parser.parse_args()
- if action == 'enable':
+ if action == "enable":
result = AppAnnotationService.enable_app_annotation(args, app_id)
- elif action == 'disable':
+ elif action == "disable":
result = AppAnnotationService.disable_app_annotation(app_id)
else:
- raise ValueError('Unsupported annotation reply action')
+ raise ValueError("Unsupported annotation reply action")
return result, 200
@@ -47,8 +46,7 @@ class AppAnnotationSettingDetailApi(Resource):
@login_required
@account_initialization_required
def get(self, app_id):
- # The role of the current user in the ta table must be admin or owner
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
@@ -61,15 +59,14 @@ class AppAnnotationSettingUpdateApi(Resource):
@login_required
@account_initialization_required
def post(self, app_id, annotation_setting_id):
- # The role of the current user in the ta table must be admin or owner
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
annotation_setting_id = str(annotation_setting_id)
parser = reqparse.RequestParser()
- parser.add_argument('score_threshold', required=True, type=float, location='json')
+ parser.add_argument("score_threshold", required=True, type=float, location="json")
args = parser.parse_args()
result = AppAnnotationService.update_app_annotation_setting(app_id, annotation_setting_id, args)
@@ -80,29 +77,24 @@ class AnnotationReplyActionStatusApi(Resource):
@setup_required
@login_required
@account_initialization_required
- @cloud_edition_billing_resource_check('annotation')
+ @cloud_edition_billing_resource_check("annotation")
def get(self, app_id, job_id, action):
- # The role of the current user in the ta table must be admin or owner
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
job_id = str(job_id)
- app_annotation_job_key = '{}_app_annotation_job_{}'.format(action, str(job_id))
+ app_annotation_job_key = "{}_app_annotation_job_{}".format(action, str(job_id))
cache_result = redis_client.get(app_annotation_job_key)
if cache_result is None:
raise ValueError("The job is not exist.")
job_status = cache_result.decode()
- error_msg = ''
- if job_status == 'error':
- app_annotation_error_key = '{}_app_annotation_error_{}'.format(action, str(job_id))
+ error_msg = ""
+ if job_status == "error":
+ app_annotation_error_key = "{}_app_annotation_error_{}".format(action, str(job_id))
error_msg = redis_client.get(app_annotation_error_key).decode()
- return {
- 'job_id': job_id,
- 'job_status': job_status,
- 'error_msg': error_msg
- }, 200
+ return {"job_id": job_id, "job_status": job_status, "error_msg": error_msg}, 200
class AnnotationListApi(Resource):
@@ -110,22 +102,21 @@ class AnnotationListApi(Resource):
@login_required
@account_initialization_required
def get(self, app_id):
- # The role of the current user in the ta table must be admin or owner
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
- page = request.args.get('page', default=1, type=int)
- limit = request.args.get('limit', default=20, type=int)
- keyword = request.args.get('keyword', default=None, type=str)
+ page = request.args.get("page", default=1, type=int)
+ limit = request.args.get("limit", default=20, type=int)
+ keyword = request.args.get("keyword", default=None, type=str)
app_id = str(app_id)
annotation_list, total = AppAnnotationService.get_annotation_list_by_app_id(app_id, page, limit, keyword)
response = {
- 'data': marshal(annotation_list, annotation_fields),
- 'has_more': len(annotation_list) == limit,
- 'limit': limit,
- 'total': total,
- 'page': page
+ "data": marshal(annotation_list, annotation_fields),
+ "has_more": len(annotation_list) == limit,
+ "limit": limit,
+ "total": total,
+ "page": page,
}
return response, 200
@@ -135,15 +126,12 @@ class AnnotationExportApi(Resource):
@login_required
@account_initialization_required
def get(self, app_id):
- # The role of the current user in the ta table must be admin or owner
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
annotation_list = AppAnnotationService.export_annotation_list_by_app_id(app_id)
- response = {
- 'data': marshal(annotation_list, annotation_fields)
- }
+ response = {"data": marshal(annotation_list, annotation_fields)}
return response, 200
@@ -151,17 +139,16 @@ class AnnotationCreateApi(Resource):
@setup_required
@login_required
@account_initialization_required
- @cloud_edition_billing_resource_check('annotation')
+ @cloud_edition_billing_resource_check("annotation")
@marshal_with(annotation_fields)
def post(self, app_id):
- # The role of the current user in the ta table must be admin or owner
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
parser = reqparse.RequestParser()
- parser.add_argument('question', required=True, type=str, location='json')
- parser.add_argument('answer', required=True, type=str, location='json')
+ parser.add_argument("question", required=True, type=str, location="json")
+ parser.add_argument("answer", required=True, type=str, location="json")
args = parser.parse_args()
annotation = AppAnnotationService.insert_app_annotation_directly(args, app_id)
return annotation
@@ -171,18 +158,17 @@ class AnnotationUpdateDeleteApi(Resource):
@setup_required
@login_required
@account_initialization_required
- @cloud_edition_billing_resource_check('annotation')
+ @cloud_edition_billing_resource_check("annotation")
@marshal_with(annotation_fields)
def post(self, app_id, annotation_id):
- # The role of the current user in the ta table must be admin or owner
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
annotation_id = str(annotation_id)
parser = reqparse.RequestParser()
- parser.add_argument('question', required=True, type=str, location='json')
- parser.add_argument('answer', required=True, type=str, location='json')
+ parser.add_argument("question", required=True, type=str, location="json")
+ parser.add_argument("answer", required=True, type=str, location="json")
args = parser.parse_args()
annotation = AppAnnotationService.update_app_annotation_directly(args, app_id, annotation_id)
return annotation
@@ -191,37 +177,35 @@ def post(self, app_id, annotation_id):
@login_required
@account_initialization_required
def delete(self, app_id, annotation_id):
- # The role of the current user in the ta table must be admin or owner
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
annotation_id = str(annotation_id)
AppAnnotationService.delete_app_annotation(app_id, annotation_id)
- return {'result': 'success'}, 200
+ return {"result": "success"}, 200
class AnnotationBatchImportApi(Resource):
@setup_required
@login_required
@account_initialization_required
- @cloud_edition_billing_resource_check('annotation')
+ @cloud_edition_billing_resource_check("annotation")
def post(self, app_id):
- # The role of the current user in the ta table must be admin or owner
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
# get file from request
- file = request.files['file']
+ file = request.files["file"]
# check file
- if 'file' not in request.files:
+ if "file" not in request.files:
raise NoFileUploadedError()
if len(request.files) > 1:
raise TooManyFilesError()
# check file type
- if not file.filename.endswith('.csv'):
+ if not file.filename.endswith(".csv"):
raise ValueError("Invalid file type. Only CSV files are allowed")
return AppAnnotationService.batch_import_app_annotations(app_id, file)
@@ -230,28 +214,23 @@ class AnnotationBatchImportStatusApi(Resource):
@setup_required
@login_required
@account_initialization_required
- @cloud_edition_billing_resource_check('annotation')
+ @cloud_edition_billing_resource_check("annotation")
def get(self, app_id, job_id):
- # The role of the current user in the ta table must be admin or owner
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
job_id = str(job_id)
- indexing_cache_key = 'app_annotation_batch_import_{}'.format(str(job_id))
+ indexing_cache_key = "app_annotation_batch_import_{}".format(str(job_id))
cache_result = redis_client.get(indexing_cache_key)
if cache_result is None:
raise ValueError("The job is not exist.")
job_status = cache_result.decode()
- error_msg = ''
- if job_status == 'error':
- indexing_error_msg_key = 'app_annotation_batch_import_error_msg_{}'.format(str(job_id))
+ error_msg = ""
+ if job_status == "error":
+ indexing_error_msg_key = "app_annotation_batch_import_error_msg_{}".format(str(job_id))
error_msg = redis_client.get(indexing_error_msg_key).decode()
- return {
- 'job_id': job_id,
- 'job_status': job_status,
- 'error_msg': error_msg
- }, 200
+ return {"job_id": job_id, "job_status": job_status, "error_msg": error_msg}, 200
class AnnotationHitHistoryListApi(Resource):
@@ -259,34 +238,35 @@ class AnnotationHitHistoryListApi(Resource):
@login_required
@account_initialization_required
def get(self, app_id, annotation_id):
- # The role of the current user in the table must be admin or owner
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
- page = request.args.get('page', default=1, type=int)
- limit = request.args.get('limit', default=20, type=int)
+ page = request.args.get("page", default=1, type=int)
+ limit = request.args.get("limit", default=20, type=int)
app_id = str(app_id)
annotation_id = str(annotation_id)
- annotation_hit_history_list, total = AppAnnotationService.get_annotation_hit_histories(app_id, annotation_id,
- page, limit)
+ annotation_hit_history_list, total = AppAnnotationService.get_annotation_hit_histories(
+ app_id, annotation_id, page, limit
+ )
response = {
- 'data': marshal(annotation_hit_history_list, annotation_hit_history_fields),
- 'has_more': len(annotation_hit_history_list) == limit,
- 'limit': limit,
- 'total': total,
- 'page': page
+ "data": marshal(annotation_hit_history_list, annotation_hit_history_fields),
+ "has_more": len(annotation_hit_history_list) == limit,
+ "limit": limit,
+ "total": total,
+ "page": page,
}
return response
-api.add_resource(AnnotationReplyActionApi, '/apps/<uuid:app_id>/annotation-reply/<string:action>')
-api.add_resource(AnnotationReplyActionStatusApi,
-                 '/apps/<uuid:app_id>/annotation-reply/<string:action>/status/<uuid:job_id>')
-api.add_resource(AnnotationListApi, '/apps/<uuid:app_id>/annotations')
-api.add_resource(AnnotationExportApi, '/apps/<uuid:app_id>/annotations/export')
-api.add_resource(AnnotationUpdateDeleteApi, '/apps/<uuid:app_id>/annotations/<uuid:annotation_id>')
-api.add_resource(AnnotationBatchImportApi, '/apps/<uuid:app_id>/annotations/batch-import')
-api.add_resource(AnnotationBatchImportStatusApi, '/apps/<uuid:app_id>/annotations/batch-import-status/<uuid:job_id>')
-api.add_resource(AnnotationHitHistoryListApi, '/apps/<uuid:app_id>/annotations/<uuid:annotation_id>/hit-histories')
-api.add_resource(AppAnnotationSettingDetailApi, '/apps/<uuid:app_id>/annotation-setting')
-api.add_resource(AppAnnotationSettingUpdateApi, '/apps/<uuid:app_id>/annotation-settings/<uuid:annotation_setting_id>')
+api.add_resource(AnnotationReplyActionApi, "/apps/<uuid:app_id>/annotation-reply/<string:action>")
+api.add_resource(
+    AnnotationReplyActionStatusApi, "/apps/<uuid:app_id>/annotation-reply/<string:action>/status/<uuid:job_id>"
+)
+api.add_resource(AnnotationListApi, "/apps/<uuid:app_id>/annotations")
+api.add_resource(AnnotationExportApi, "/apps/<uuid:app_id>/annotations/export")
+api.add_resource(AnnotationUpdateDeleteApi, "/apps/<uuid:app_id>/annotations/<uuid:annotation_id>")
+api.add_resource(AnnotationBatchImportApi, "/apps/<uuid:app_id>/annotations/batch-import")
+api.add_resource(AnnotationBatchImportStatusApi, "/apps/<uuid:app_id>/annotations/batch-import-status/<uuid:job_id>")
+api.add_resource(AnnotationHitHistoryListApi, "/apps/<uuid:app_id>/annotations/<uuid:annotation_id>/hit-histories")
+api.add_resource(AppAnnotationSettingDetailApi, "/apps/<uuid:app_id>/annotation-setting")
+api.add_resource(AppAnnotationSettingUpdateApi, "/apps/<uuid:app_id>/annotation-settings/<uuid:annotation_setting_id>")
diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py
index 2f304b970c6050..cc9c8b31cb6be7 100644
--- a/api/controllers/console/app/app.py
+++ b/api/controllers/console/app/app.py
@@ -18,27 +18,35 @@
from services.app_dsl_service import AppDslService
from services.app_service import AppService
-ALLOW_CREATE_APP_MODES = ['chat', 'agent-chat', 'advanced-chat', 'workflow', 'completion']
+ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "completion"]
class AppListApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
def get(self):
"""Get app list"""
+
def uuid_list(value):
try:
- return [str(uuid.UUID(v)) for v in value.split(',')]
+ return [str(uuid.UUID(v)) for v in value.split(",")]
except ValueError:
abort(400, message="Invalid UUID format in tag_ids.")
+
parser = reqparse.RequestParser()
- parser.add_argument('page', type=inputs.int_range(1, 99999), required=False, default=1, location='args')
- parser.add_argument('limit', type=inputs.int_range(1, 100), required=False, default=20, location='args')
- parser.add_argument('mode', type=str, choices=['chat', 'workflow', 'agent-chat', 'channel', 'all'], default='all', location='args', required=False)
- parser.add_argument('name', type=str, location='args', required=False)
- parser.add_argument('tag_ids', type=uuid_list, location='args', required=False)
+ parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
+ parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
+ parser.add_argument(
+ "mode",
+ type=str,
+ choices=["chat", "workflow", "agent-chat", "channel", "all"],
+ default="all",
+ location="args",
+ required=False,
+ )
+ parser.add_argument("name", type=str, location="args", required=False)
+ parser.add_argument("tag_ids", type=uuid_list, location="args", required=False)
args = parser.parse_args()
@@ -46,7 +54,7 @@ def uuid_list(value):
app_service = AppService()
app_pagination = app_service.get_paginate_apps(current_user.current_tenant_id, args)
if not app_pagination:
- return {'data': [], 'total': 0, 'page': 1, 'limit': 20, 'has_more': False}
+ return {"data": [], "total": 0, "page": 1, "limit": 20, "has_more": False}
return marshal(app_pagination, app_pagination_fields)
@@ -54,22 +62,23 @@ def uuid_list(value):
@login_required
@account_initialization_required
@marshal_with(app_detail_fields)
- @cloud_edition_billing_resource_check('apps')
+ @cloud_edition_billing_resource_check("apps")
def post(self):
"""Create app"""
parser = reqparse.RequestParser()
- parser.add_argument('name', type=str, required=True, location='json')
- parser.add_argument('description', type=str, location='json')
- parser.add_argument('mode', type=str, choices=ALLOW_CREATE_APP_MODES, location='json')
- parser.add_argument('icon', type=str, location='json')
- parser.add_argument('icon_background', type=str, location='json')
+ parser.add_argument("name", type=str, required=True, location="json")
+ parser.add_argument("description", type=str, location="json")
+ parser.add_argument("mode", type=str, choices=ALLOW_CREATE_APP_MODES, location="json")
+ parser.add_argument("icon_type", type=str, location="json")
+ parser.add_argument("icon", type=str, location="json")
+ parser.add_argument("icon_background", type=str, location="json")
args = parser.parse_args()
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
- if 'mode' not in args or args['mode'] is None:
+ if "mode" not in args or args["mode"] is None:
raise BadRequest("mode is required")
app_service = AppService()
@@ -83,7 +92,7 @@ class AppImportApi(Resource):
@login_required
@account_initialization_required
@marshal_with(app_detail_fields_with_site)
- @cloud_edition_billing_resource_check('apps')
+ @cloud_edition_billing_resource_check("apps")
def post(self):
"""Import app"""
# The role of the current user in the ta table must be admin, owner, or editor
@@ -91,18 +100,16 @@ def post(self):
raise Forbidden()
parser = reqparse.RequestParser()
- parser.add_argument('data', type=str, required=True, nullable=False, location='json')
- parser.add_argument('name', type=str, location='json')
- parser.add_argument('description', type=str, location='json')
- parser.add_argument('icon', type=str, location='json')
- parser.add_argument('icon_background', type=str, location='json')
+ parser.add_argument("data", type=str, required=True, nullable=False, location="json")
+ parser.add_argument("name", type=str, location="json")
+ parser.add_argument("description", type=str, location="json")
+ parser.add_argument("icon_type", type=str, location="json")
+ parser.add_argument("icon", type=str, location="json")
+ parser.add_argument("icon_background", type=str, location="json")
args = parser.parse_args()
app = AppDslService.import_and_create_new_app(
- tenant_id=current_user.current_tenant_id,
- data=args['data'],
- args=args,
- account=current_user
+ tenant_id=current_user.current_tenant_id, data=args["data"], args=args, account=current_user
)
return app, 201
@@ -113,7 +120,7 @@ class AppImportFromUrlApi(Resource):
@login_required
@account_initialization_required
@marshal_with(app_detail_fields_with_site)
- @cloud_edition_billing_resource_check('apps')
+ @cloud_edition_billing_resource_check("apps")
def post(self):
"""Import app from url"""
# The role of the current user in the ta table must be admin, owner, or editor
@@ -121,25 +128,21 @@ def post(self):
raise Forbidden()
parser = reqparse.RequestParser()
- parser.add_argument('url', type=str, required=True, nullable=False, location='json')
- parser.add_argument('name', type=str, location='json')
- parser.add_argument('description', type=str, location='json')
- parser.add_argument('icon', type=str, location='json')
- parser.add_argument('icon_background', type=str, location='json')
+ parser.add_argument("url", type=str, required=True, nullable=False, location="json")
+ parser.add_argument("name", type=str, location="json")
+ parser.add_argument("description", type=str, location="json")
+ parser.add_argument("icon", type=str, location="json")
+ parser.add_argument("icon_background", type=str, location="json")
args = parser.parse_args()
app = AppDslService.import_and_create_new_app_from_url(
- tenant_id=current_user.current_tenant_id,
- url=args['url'],
- args=args,
- account=current_user
+ tenant_id=current_user.current_tenant_id, url=args["url"], args=args, account=current_user
)
return app, 201
class AppApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
@@ -163,13 +166,14 @@ def put(self, app_model):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
-
+
parser = reqparse.RequestParser()
- parser.add_argument('name', type=str, required=True, nullable=False, location='json')
- parser.add_argument('description', type=str, location='json')
- parser.add_argument('icon', type=str, location='json')
- parser.add_argument('icon_background', type=str, location='json')
- parser.add_argument('max_active_requests', type=int, location='json')
+ parser.add_argument("name", type=str, required=True, nullable=False, location="json")
+ parser.add_argument("description", type=str, location="json")
+ parser.add_argument("icon_type", type=str, location="json")
+ parser.add_argument("icon", type=str, location="json")
+ parser.add_argument("icon_background", type=str, location="json")
+ parser.add_argument("max_active_requests", type=int, location="json")
args = parser.parse_args()
app_service = AppService()
@@ -190,7 +194,7 @@ def delete(self, app_model):
app_service = AppService()
app_service.delete_app(app_model)
- return {'result': 'success'}, 204
+ return {"result": "success"}, 204
class AppCopyApi(Resource):
@@ -206,18 +210,16 @@ def post(self, app_model):
raise Forbidden()
parser = reqparse.RequestParser()
- parser.add_argument('name', type=str, location='json')
- parser.add_argument('description', type=str, location='json')
- parser.add_argument('icon', type=str, location='json')
- parser.add_argument('icon_background', type=str, location='json')
+ parser.add_argument("name", type=str, location="json")
+ parser.add_argument("description", type=str, location="json")
+ parser.add_argument("icon_type", type=str, location="json")
+ parser.add_argument("icon", type=str, location="json")
+ parser.add_argument("icon_background", type=str, location="json")
args = parser.parse_args()
data = AppDslService.export_dsl(app_model=app_model, include_secret=True)
app = AppDslService.import_and_create_new_app(
- tenant_id=current_user.current_tenant_id,
- data=data,
- args=args,
- account=current_user
+ tenant_id=current_user.current_tenant_id, data=data, args=args, account=current_user
)
return app, 201
@@ -236,12 +238,10 @@ def get(self, app_model):
# Add include_secret params
parser = reqparse.RequestParser()
- parser.add_argument('include_secret', type=inputs.boolean, default=False, location='args')
+ parser.add_argument("include_secret", type=inputs.boolean, default=False, location="args")
args = parser.parse_args()
- return {
- "data": AppDslService.export_dsl(app_model=app_model, include_secret=args['include_secret'])
- }
+ return {"data": AppDslService.export_dsl(app_model=app_model, include_secret=args["include_secret"])}
class AppNameApi(Resource):
@@ -254,13 +254,13 @@ def post(self, app_model):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
-
+
parser = reqparse.RequestParser()
- parser.add_argument('name', type=str, required=True, location='json')
+ parser.add_argument("name", type=str, required=True, location="json")
args = parser.parse_args()
app_service = AppService()
- app_model = app_service.update_app_name(app_model, args.get('name'))
+ app_model = app_service.update_app_name(app_model, args.get("name"))
return app_model
@@ -275,14 +275,14 @@ def post(self, app_model):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
-
+
parser = reqparse.RequestParser()
- parser.add_argument('icon', type=str, location='json')
- parser.add_argument('icon_background', type=str, location='json')
+ parser.add_argument("icon", type=str, location="json")
+ parser.add_argument("icon_background", type=str, location="json")
args = parser.parse_args()
app_service = AppService()
- app_model = app_service.update_app_icon(app_model, args.get('icon'), args.get('icon_background'))
+ app_model = app_service.update_app_icon(app_model, args.get("icon"), args.get("icon_background"))
return app_model
@@ -297,13 +297,13 @@ def post(self, app_model):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
-
+
parser = reqparse.RequestParser()
- parser.add_argument('enable_site', type=bool, required=True, location='json')
+ parser.add_argument("enable_site", type=bool, required=True, location="json")
args = parser.parse_args()
app_service = AppService()
- app_model = app_service.update_app_site_status(app_model, args.get('enable_site'))
+ app_model = app_service.update_app_site_status(app_model, args.get("enable_site"))
return app_model
@@ -318,13 +318,13 @@ def post(self, app_model):
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
-
+
parser = reqparse.RequestParser()
- parser.add_argument('enable_api', type=bool, required=True, location='json')
+ parser.add_argument("enable_api", type=bool, required=True, location="json")
args = parser.parse_args()
app_service = AppService()
- app_model = app_service.update_app_api_status(app_model, args.get('enable_api'))
+ app_model = app_service.update_app_api_status(app_model, args.get("enable_api"))
return app_model
@@ -335,9 +335,7 @@ class AppTraceApi(Resource):
@account_initialization_required
def get(self, app_id):
"""Get app trace"""
- app_trace_config = OpsTraceManager.get_app_tracing_config(
- app_id=app_id
- )
+ app_trace_config = OpsTraceManager.get_app_tracing_config(app_id=app_id)
return app_trace_config
@@ -349,27 +347,27 @@ def post(self, app_id):
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser()
- parser.add_argument('enabled', type=bool, required=True, location='json')
- parser.add_argument('tracing_provider', type=str, required=True, location='json')
+ parser.add_argument("enabled", type=bool, required=True, location="json")
+ parser.add_argument("tracing_provider", type=str, required=True, location="json")
args = parser.parse_args()
OpsTraceManager.update_app_tracing_config(
app_id=app_id,
- enabled=args['enabled'],
- tracing_provider=args['tracing_provider'],
+ enabled=args["enabled"],
+ tracing_provider=args["tracing_provider"],
)
return {"result": "success"}
-api.add_resource(AppListApi, '/apps')
-api.add_resource(AppImportApi, '/apps/import')
-api.add_resource(AppImportFromUrlApi, '/apps/import/url')
-api.add_resource(AppApi, '/apps/<uuid:app_id>')
-api.add_resource(AppCopyApi, '/apps/<uuid:app_id>/copy')
-api.add_resource(AppExportApi, '/apps/<uuid:app_id>/export')
-api.add_resource(AppNameApi, '/apps/<uuid:app_id>/name')
-api.add_resource(AppIconApi, '/apps/<uuid:app_id>/icon')
-api.add_resource(AppSiteStatus, '/apps/<uuid:app_id>/site-enable')
-api.add_resource(AppApiStatus, '/apps/<uuid:app_id>/api-enable')
-api.add_resource(AppTraceApi, '/apps/<uuid:app_id>/trace')
+api.add_resource(AppListApi, "/apps")
+api.add_resource(AppImportApi, "/apps/import")
+api.add_resource(AppImportFromUrlApi, "/apps/import/url")
+api.add_resource(AppApi, "/apps/<uuid:app_id>")
+api.add_resource(AppCopyApi, "/apps/<uuid:app_id>/copy")
+api.add_resource(AppExportApi, "/apps/<uuid:app_id>/export")
+api.add_resource(AppNameApi, "/apps/<uuid:app_id>/name")
+api.add_resource(AppIconApi, "/apps/<uuid:app_id>/icon")
+api.add_resource(AppSiteStatus, "/apps/<uuid:app_id>/site-enable")
+api.add_resource(AppApiStatus, "/apps/<uuid:app_id>/api-enable")
+api.add_resource(AppTraceApi, "/apps/<uuid:app_id>/trace")
diff --git a/api/controllers/console/app/audio.py b/api/controllers/console/app/audio.py
index 1de08afa4e08b9..437a6a7b3865b6 100644
--- a/api/controllers/console/app/audio.py
+++ b/api/controllers/console/app/audio.py
@@ -39,7 +39,7 @@ class ChatMessageAudioApi(Resource):
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
def post(self, app_model):
- file = request.files['file']
+ file = request.files["file"]
try:
response = AudioService.transcript_asr(
@@ -85,31 +85,31 @@ def post(self, app_model):
try:
parser = reqparse.RequestParser()
- parser.add_argument('message_id', type=str, location='json')
- parser.add_argument('text', type=str, location='json')
- parser.add_argument('voice', type=str, location='json')
- parser.add_argument('streaming', type=bool, location='json')
+ parser.add_argument("message_id", type=str, location="json")
+ parser.add_argument("text", type=str, location="json")
+ parser.add_argument("voice", type=str, location="json")
+ parser.add_argument("streaming", type=bool, location="json")
args = parser.parse_args()
- message_id = args.get('message_id', None)
- text = args.get('text', None)
- if (app_model.mode in [AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value]
- and app_model.workflow
- and app_model.workflow.features_dict):
- text_to_speech = app_model.workflow.features_dict.get('text_to_speech')
- voice = args.get('voice') if args.get('voice') else text_to_speech.get('voice')
+ message_id = args.get("message_id", None)
+ text = args.get("text", None)
+ if (
+ app_model.mode in [AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value]
+ and app_model.workflow
+ and app_model.workflow.features_dict
+ ):
+ text_to_speech = app_model.workflow.features_dict.get("text_to_speech")
+ voice = args.get("voice") if args.get("voice") else text_to_speech.get("voice")
else:
try:
- voice = args.get('voice') if args.get('voice') else app_model.app_model_config.text_to_speech_dict.get(
- 'voice')
+ voice = (
+ args.get("voice")
+ if args.get("voice")
+ else app_model.app_model_config.text_to_speech_dict.get("voice")
+ )
except Exception:
voice = None
- response = AudioService.transcript_tts(
- app_model=app_model,
- text=text,
- message_id=message_id,
- voice=voice
- )
+ response = AudioService.transcript_tts(app_model=app_model, text=text, message_id=message_id, voice=voice)
return response
except services.errors.app_model_config.AppModelConfigBrokenError:
logging.exception("App model config broken.")
@@ -145,12 +145,12 @@ class TextModesApi(Resource):
def get(self, app_model):
try:
parser = reqparse.RequestParser()
- parser.add_argument('language', type=str, required=True, location='args')
+ parser.add_argument("language", type=str, required=True, location="args")
args = parser.parse_args()
response = AudioService.transcript_tts_voices(
tenant_id=app_model.tenant_id,
- language=args['language'],
+ language=args["language"],
)
return response
@@ -179,6 +179,6 @@ def get(self, app_model):
raise InternalServerError()
-api.add_resource(ChatMessageAudioApi, '/apps/<uuid:app_id>/audio-to-text')
-api.add_resource(ChatMessageTextApi, '/apps/<uuid:app_id>/text-to-audio')
-api.add_resource(TextModesApi, '/apps/<uuid:app_id>/text-to-audio/voices')
+api.add_resource(ChatMessageAudioApi, "/apps/<uuid:app_id>/audio-to-text")
+api.add_resource(ChatMessageTextApi, "/apps/<uuid:app_id>/text-to-audio")
+api.add_resource(TextModesApi, "/apps/<uuid:app_id>/text-to-audio/voices")
diff --git a/api/controllers/console/app/completion.py b/api/controllers/console/app/completion.py
index 61582536fdbe1d..53de51c24d798a 100644
--- a/api/controllers/console/app/completion.py
+++ b/api/controllers/console/app/completion.py
@@ -17,6 +17,7 @@
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
+from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.entities.app_invoke_entities import InvokeFrom
from core.errors.error import (
@@ -31,37 +32,33 @@
from libs.login import login_required
from models.model import AppMode
from services.app_generate_service import AppGenerateService
+from services.errors.llm import InvokeRateLimitError
# define completion message api for user
class CompletionMessageApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=AppMode.COMPLETION)
def post(self, app_model):
parser = reqparse.RequestParser()
- parser.add_argument('inputs', type=dict, required=True, location='json')
- parser.add_argument('query', type=str, location='json', default='')
- parser.add_argument('files', type=list, required=False, location='json')
- parser.add_argument('model_config', type=dict, required=True, location='json')
- parser.add_argument('response_mode', type=str, choices=['blocking', 'streaming'], location='json')
- parser.add_argument('retriever_from', type=str, required=False, default='dev', location='json')
+ parser.add_argument("inputs", type=dict, required=True, location="json")
+ parser.add_argument("query", type=str, location="json", default="")
+ parser.add_argument("files", type=list, required=False, location="json")
+ parser.add_argument("model_config", type=dict, required=True, location="json")
+ parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json")
+ parser.add_argument("retriever_from", type=str, required=False, default="dev", location="json")
args = parser.parse_args()
- streaming = args['response_mode'] != 'blocking'
- args['auto_generate_name'] = False
+ streaming = args["response_mode"] != "blocking"
+ args["auto_generate_name"] = False
account = flask_login.current_user
try:
response = AppGenerateService.generate(
- app_model=app_model,
- user=account,
- args=args,
- invoke_from=InvokeFrom.DEBUGGER,
- streaming=streaming
+ app_model=app_model, user=account, args=args, invoke_from=InvokeFrom.DEBUGGER, streaming=streaming
)
return helper.compact_generate_response(response)
@@ -97,7 +94,7 @@ def post(self, app_model, task_id):
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, account.id)
- return {'result': 'success'}, 200
+ return {"result": "success"}, 200
class ChatMessageApi(Resource):
@@ -107,27 +104,23 @@ class ChatMessageApi(Resource):
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT])
def post(self, app_model):
parser = reqparse.RequestParser()
- parser.add_argument('inputs', type=dict, required=True, location='json')
- parser.add_argument('query', type=str, required=True, location='json')
- parser.add_argument('files', type=list, required=False, location='json')
- parser.add_argument('model_config', type=dict, required=True, location='json')
- parser.add_argument('conversation_id', type=uuid_value, location='json')
- parser.add_argument('response_mode', type=str, choices=['blocking', 'streaming'], location='json')
- parser.add_argument('retriever_from', type=str, required=False, default='dev', location='json')
+ parser.add_argument("inputs", type=dict, required=True, location="json")
+ parser.add_argument("query", type=str, required=True, location="json")
+ parser.add_argument("files", type=list, required=False, location="json")
+ parser.add_argument("model_config", type=dict, required=True, location="json")
+ parser.add_argument("conversation_id", type=uuid_value, location="json")
+ parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json")
+ parser.add_argument("retriever_from", type=str, required=False, default="dev", location="json")
args = parser.parse_args()
- streaming = args['response_mode'] != 'blocking'
- args['auto_generate_name'] = False
+ streaming = args["response_mode"] != "blocking"
+ args["auto_generate_name"] = False
account = flask_login.current_user
try:
response = AppGenerateService.generate(
- app_model=app_model,
- user=account,
- args=args,
- invoke_from=InvokeFrom.DEBUGGER,
- streaming=streaming
+ app_model=app_model, user=account, args=args, invoke_from=InvokeFrom.DEBUGGER, streaming=streaming
)
return helper.compact_generate_response(response)
@@ -144,6 +137,8 @@ def post(self, app_model):
raise ProviderQuotaExceededError()
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()
+ except InvokeRateLimitError as ex:
+ raise InvokeRateLimitHttpError(ex.description)
except InvokeError as e:
raise CompletionRequestError(e.description)
except (ValueError, AppInvokeQuotaExceededError) as e:
@@ -163,10 +158,10 @@ def post(self, app_model, task_id):
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, account.id)
- return {'result': 'success'}, 200
+ return {"result": "success"}, 200
-api.add_resource(CompletionMessageApi, '/apps/<uuid:app_id>/completion-messages')
-api.add_resource(CompletionMessageStopApi, '/apps/<uuid:app_id>/completion-messages/<string:task_id>/stop')
-api.add_resource(ChatMessageApi, '/apps/<uuid:app_id>/chat-messages')
-api.add_resource(ChatMessageStopApi, '/apps/<uuid:app_id>/chat-messages/<string:task_id>/stop')
+api.add_resource(CompletionMessageApi, "/apps/<uuid:app_id>/completion-messages")
+api.add_resource(CompletionMessageStopApi, "/apps/<uuid:app_id>/completion-messages/<string:task_id>/stop")
+api.add_resource(ChatMessageApi, "/apps/<uuid:app_id>/chat-messages")
+api.add_resource(ChatMessageStopApi, "/apps/<uuid:app_id>/chat-messages/<string:task_id>/stop")
diff --git a/api/controllers/console/app/conversation.py b/api/controllers/console/app/conversation.py
index 96cd9a6ea141eb..c3aac6690e4c3d 100644
--- a/api/controllers/console/app/conversation.py
+++ b/api/controllers/console/app/conversation.py
@@ -26,34 +26,32 @@
class CompletionConversationApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=AppMode.COMPLETION)
@marshal_with(conversation_pagination_fields)
def get(self, app_model):
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
- parser.add_argument('keyword', type=str, location='args')
- parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
- parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
- parser.add_argument('annotation_status', type=str,
- choices=['annotated', 'not_annotated', 'all'], default='all', location='args')
- parser.add_argument('page', type=int_range(1, 99999), default=1, location='args')
- parser.add_argument('limit', type=int_range(1, 100), default=20, location='args')
+ parser.add_argument("keyword", type=str, location="args")
+ parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ parser.add_argument(
+ "annotation_status", type=str, choices=["annotated", "not_annotated", "all"], default="all", location="args"
+ )
+ parser.add_argument("page", type=int_range(1, 99999), default=1, location="args")
+ parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")
args = parser.parse_args()
- query = db.select(Conversation).where(Conversation.app_id == app_model.id, Conversation.mode == 'completion')
+ query = db.select(Conversation).where(Conversation.app_id == app_model.id, Conversation.mode == "completion")
- if args['keyword']:
- query = query.join(
- Message, Message.conversation_id == Conversation.id
- ).filter(
+ if args["keyword"]:
+ query = query.join(Message, Message.conversation_id == Conversation.id).filter(
or_(
- Message.query.ilike('%{}%'.format(args['keyword'])),
- Message.answer.ilike('%{}%'.format(args['keyword']))
+ Message.query.ilike("%{}%".format(args["keyword"])),
+ Message.answer.ilike("%{}%".format(args["keyword"])),
)
)
@@ -61,8 +59,8 @@ def get(self, app_model):
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
- if args['start']:
- start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
+ if args["start"]:
+ start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
@@ -70,8 +68,8 @@ def get(self, app_model):
query = query.where(Conversation.created_at >= start_datetime_utc)
- if args['end']:
- end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
+ if args["end"]:
+ end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=59)
end_datetime_timezone = timezone.localize(end_datetime)
@@ -79,36 +77,32 @@ def get(self, app_model):
query = query.where(Conversation.created_at < end_datetime_utc)
- if args['annotation_status'] == "annotated":
+ if args["annotation_status"] == "annotated":
query = query.options(joinedload(Conversation.message_annotations)).join(
MessageAnnotation, MessageAnnotation.conversation_id == Conversation.id
)
- elif args['annotation_status'] == "not_annotated":
- query = query.outerjoin(
- MessageAnnotation, MessageAnnotation.conversation_id == Conversation.id
- ).group_by(Conversation.id).having(func.count(MessageAnnotation.id) == 0)
+ elif args["annotation_status"] == "not_annotated":
+ query = (
+ query.outerjoin(MessageAnnotation, MessageAnnotation.conversation_id == Conversation.id)
+ .group_by(Conversation.id)
+ .having(func.count(MessageAnnotation.id) == 0)
+ )
query = query.order_by(Conversation.created_at.desc())
- conversations = db.paginate(
- query,
- page=args['page'],
- per_page=args['limit'],
- error_out=False
- )
+ conversations = db.paginate(query, page=args["page"], per_page=args["limit"], error_out=False)
return conversations
class CompletionConversationDetailApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=AppMode.COMPLETION)
@marshal_with(conversation_message_detail_fields)
def get(self, app_model, conversation_id):
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
conversation_id = str(conversation_id)
@@ -119,12 +113,15 @@ def get(self, app_model, conversation_id):
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
def delete(self, app_model, conversation_id):
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
conversation_id = str(conversation_id)
- conversation = db.session.query(Conversation) \
- .filter(Conversation.id == conversation_id, Conversation.app_id == app_model.id).first()
+ conversation = (
+ db.session.query(Conversation)
+ .filter(Conversation.id == conversation_id, Conversation.app_id == app_model.id)
+ .first()
+ )
if not conversation:
raise NotFound("Conversation Not Exists.")
@@ -132,34 +129,41 @@ def delete(self, app_model, conversation_id):
conversation.is_deleted = True
db.session.commit()
- return {'result': 'success'}, 204
+ return {"result": "success"}, 204
class ChatConversationApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@marshal_with(conversation_with_summary_pagination_fields)
def get(self, app_model):
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
- parser.add_argument('keyword', type=str, location='args')
- parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
- parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
- parser.add_argument('annotation_status', type=str,
- choices=['annotated', 'not_annotated', 'all'], default='all', location='args')
- parser.add_argument('message_count_gte', type=int_range(1, 99999), required=False, location='args')
- parser.add_argument('page', type=int_range(1, 99999), required=False, default=1, location='args')
- parser.add_argument('limit', type=int_range(1, 100), required=False, default=20, location='args')
+ parser.add_argument("keyword", type=str, location="args")
+ parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ parser.add_argument(
+ "annotation_status", type=str, choices=["annotated", "not_annotated", "all"], default="all", location="args"
+ )
+ parser.add_argument("message_count_gte", type=int_range(1, 99999), required=False, location="args")
+ parser.add_argument("page", type=int_range(1, 99999), required=False, default=1, location="args")
+ parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
+ parser.add_argument(
+ "sort_by",
+ type=str,
+ choices=["created_at", "-created_at", "updated_at", "-updated_at"],
+ required=False,
+ default="-updated_at",
+ location="args",
+ )
args = parser.parse_args()
subquery = (
db.session.query(
- Conversation.id.label('conversation_id'),
- EndUser.session_id.label('from_end_user_session_id')
+ Conversation.id.label("conversation_id"), EndUser.session_id.label("from_end_user_session_id")
)
.outerjoin(EndUser, Conversation.from_end_user_id == EndUser.id)
.subquery()
@@ -167,19 +171,19 @@ def get(self, app_model):
query = db.select(Conversation).where(Conversation.app_id == app_model.id)
- if args['keyword']:
- keyword_filter = '%{}%'.format(args['keyword'])
- query = query.join(
- Message, Message.conversation_id == Conversation.id,
- ).join(
- subquery, subquery.c.conversation_id == Conversation.id
- ).filter(
+ if args["keyword"]:
+ keyword_filter = "%{}%".format(args["keyword"])
+ message_subquery = (
+ db.session.query(Message.conversation_id)
+ .filter(or_(Message.query.ilike(keyword_filter), Message.answer.ilike(keyword_filter)))
+ .subquery()
+ )
+ query = query.join(subquery, subquery.c.conversation_id == Conversation.id).filter(
or_(
- Message.query.ilike(keyword_filter),
- Message.answer.ilike(keyword_filter),
+ Conversation.id.in_(message_subquery),
Conversation.name.ilike(keyword_filter),
Conversation.introduction.ilike(keyword_filter),
- subquery.c.from_end_user_session_id.ilike(keyword_filter)
+ subquery.c.from_end_user_session_id.ilike(keyword_filter),
),
)
@@ -187,8 +191,8 @@ def get(self, app_model):
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
- if args['start']:
- start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
+ if args["start"]:
+ start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
@@ -196,8 +200,8 @@ def get(self, app_model):
query = query.where(Conversation.created_at >= start_datetime_utc)
- if args['end']:
- end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
+ if args["end"]:
+ end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=59)
end_datetime_timezone = timezone.localize(end_datetime)
@@ -205,47 +209,53 @@ def get(self, app_model):
query = query.where(Conversation.created_at < end_datetime_utc)
- if args['annotation_status'] == "annotated":
+ if args["annotation_status"] == "annotated":
query = query.options(joinedload(Conversation.message_annotations)).join(
MessageAnnotation, MessageAnnotation.conversation_id == Conversation.id
)
- elif args['annotation_status'] == "not_annotated":
- query = query.outerjoin(
- MessageAnnotation, MessageAnnotation.conversation_id == Conversation.id
- ).group_by(Conversation.id).having(func.count(MessageAnnotation.id) == 0)
+ elif args["annotation_status"] == "not_annotated":
+ query = (
+ query.outerjoin(MessageAnnotation, MessageAnnotation.conversation_id == Conversation.id)
+ .group_by(Conversation.id)
+ .having(func.count(MessageAnnotation.id) == 0)
+ )
- if args['message_count_gte'] and args['message_count_gte'] >= 1:
+ if args["message_count_gte"] and args["message_count_gte"] >= 1:
query = (
query.options(joinedload(Conversation.messages))
.join(Message, Message.conversation_id == Conversation.id)
.group_by(Conversation.id)
- .having(func.count(Message.id) >= args['message_count_gte'])
+ .having(func.count(Message.id) >= args["message_count_gte"])
)
if app_model.mode == AppMode.ADVANCED_CHAT.value:
query = query.where(Conversation.invoke_from != InvokeFrom.DEBUGGER.value)
- query = query.order_by(Conversation.created_at.desc())
+ match args["sort_by"]:
+ case "created_at":
+ query = query.order_by(Conversation.created_at.asc())
+ case "-created_at":
+ query = query.order_by(Conversation.created_at.desc())
+ case "updated_at":
+ query = query.order_by(Conversation.updated_at.asc())
+ case "-updated_at":
+ query = query.order_by(Conversation.updated_at.desc())
+ case _:
+ query = query.order_by(Conversation.created_at.desc())
- conversations = db.paginate(
- query,
- page=args['page'],
- per_page=args['limit'],
- error_out=False
- )
+ conversations = db.paginate(query, page=args["page"], per_page=args["limit"], error_out=False)
return conversations
class ChatConversationDetailApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@marshal_with(conversation_detail_fields)
def get(self, app_model, conversation_id):
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
conversation_id = str(conversation_id)
@@ -256,12 +266,15 @@ def get(self, app_model, conversation_id):
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@account_initialization_required
def delete(self, app_model, conversation_id):
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
conversation_id = str(conversation_id)
- conversation = db.session.query(Conversation) \
- .filter(Conversation.id == conversation_id, Conversation.app_id == app_model.id).first()
+ conversation = (
+ db.session.query(Conversation)
+ .filter(Conversation.id == conversation_id, Conversation.app_id == app_model.id)
+ .first()
+ )
if not conversation:
raise NotFound("Conversation Not Exists.")
@@ -269,18 +282,21 @@ def delete(self, app_model, conversation_id):
conversation.is_deleted = True
db.session.commit()
- return {'result': 'success'}, 204
+ return {"result": "success"}, 204
-api.add_resource(CompletionConversationApi, '/apps/<uuid:app_id>/completion-conversations')
-api.add_resource(CompletionConversationDetailApi, '/apps/<uuid:app_id>/completion-conversations/<uuid:conversation_id>')
-api.add_resource(ChatConversationApi, '/apps/<uuid:app_id>/chat-conversations')
-api.add_resource(ChatConversationDetailApi, '/apps/<uuid:app_id>/chat-conversations/<uuid:conversation_id>')
+api.add_resource(CompletionConversationApi, "/apps/<uuid:app_id>/completion-conversations")
+api.add_resource(CompletionConversationDetailApi, "/apps/<uuid:app_id>/completion-conversations/<uuid:conversation_id>")
+api.add_resource(ChatConversationApi, "/apps/<uuid:app_id>/chat-conversations")
+api.add_resource(ChatConversationDetailApi, "/apps/<uuid:app_id>/chat-conversations/<uuid:conversation_id>")
def _get_conversation(app_model, conversation_id):
- conversation = db.session.query(Conversation) \
- .filter(Conversation.id == conversation_id, Conversation.app_id == app_model.id).first()
+ conversation = (
+ db.session.query(Conversation)
+ .filter(Conversation.id == conversation_id, Conversation.app_id == app_model.id)
+ .first()
+ )
if not conversation:
raise NotFound("Conversation Not Exists.")
diff --git a/api/controllers/console/app/conversation_variables.py b/api/controllers/console/app/conversation_variables.py
new file mode 100644
index 00000000000000..23b234dac9d397
--- /dev/null
+++ b/api/controllers/console/app/conversation_variables.py
@@ -0,0 +1,61 @@
+from flask_restful import Resource, marshal_with, reqparse
+from sqlalchemy import select
+from sqlalchemy.orm import Session
+
+from controllers.console import api
+from controllers.console.app.wraps import get_app_model
+from controllers.console.setup import setup_required
+from controllers.console.wraps import account_initialization_required
+from extensions.ext_database import db
+from fields.conversation_variable_fields import paginated_conversation_variable_fields
+from libs.login import login_required
+from models import ConversationVariable
+from models.model import AppMode
+
+
+class ConversationVariablesApi(Resource):
+ @setup_required
+ @login_required
+ @account_initialization_required
+ @get_app_model(mode=AppMode.ADVANCED_CHAT)
+ @marshal_with(paginated_conversation_variable_fields)
+ def get(self, app_model):
+ parser = reqparse.RequestParser()
+ parser.add_argument("conversation_id", type=str, location="args")
+ args = parser.parse_args()
+
+ stmt = (
+ select(ConversationVariable)
+ .where(ConversationVariable.app_id == app_model.id)
+ .order_by(ConversationVariable.created_at)
+ )
+ if args["conversation_id"]:
+ stmt = stmt.where(ConversationVariable.conversation_id == args["conversation_id"])
+ else:
+ raise ValueError("conversation_id is required")
+
+ # NOTE: This is a temporary solution to avoid performance issues.
+ page = 1
+ page_size = 100
+ stmt = stmt.limit(page_size).offset((page - 1) * page_size)
+
+ with Session(db.engine) as session:
+ rows = session.scalars(stmt).all()
+
+ return {
+ "page": page,
+ "limit": page_size,
+ "total": len(rows),
+ "has_more": False,
+ "data": [
+ {
+ "created_at": row.created_at,
+ "updated_at": row.updated_at,
+ **row.to_variable().model_dump(),
+ }
+ for row in rows
+ ],
+ }
+
+
+api.add_resource(ConversationVariablesApi, "/apps/<uuid:app_id>/conversation-variables")
diff --git a/api/controllers/console/app/error.py b/api/controllers/console/app/error.py
index f6feed12217a85..1559f82d6ea142 100644
--- a/api/controllers/console/app/error.py
+++ b/api/controllers/console/app/error.py
@@ -2,116 +2,128 @@
class AppNotFoundError(BaseHTTPException):
- error_code = 'app_not_found'
+ error_code = "app_not_found"
description = "App not found."
code = 404
class ProviderNotInitializeError(BaseHTTPException):
- error_code = 'provider_not_initialize'
- description = "No valid model provider credentials found. " \
- "Please go to Settings -> Model Provider to complete your provider credentials."
+ error_code = "provider_not_initialize"
+ description = (
+ "No valid model provider credentials found. "
+ "Please go to Settings -> Model Provider to complete your provider credentials."
+ )
code = 400
class ProviderQuotaExceededError(BaseHTTPException):
- error_code = 'provider_quota_exceeded'
- description = "Your quota for Dify Hosted Model Provider has been exhausted. " \
- "Please go to Settings -> Model Provider to complete your own provider credentials."
+ error_code = "provider_quota_exceeded"
+ description = (
+ "Your quota for Dify Hosted Model Provider has been exhausted. "
+ "Please go to Settings -> Model Provider to complete your own provider credentials."
+ )
code = 400
class ProviderModelCurrentlyNotSupportError(BaseHTTPException):
- error_code = 'model_currently_not_support'
+ error_code = "model_currently_not_support"
description = "Dify Hosted OpenAI trial currently not support the GPT-4 model."
code = 400
class ConversationCompletedError(BaseHTTPException):
- error_code = 'conversation_completed'
+ error_code = "conversation_completed"
description = "The conversation has ended. Please start a new conversation."
code = 400
class AppUnavailableError(BaseHTTPException):
- error_code = 'app_unavailable'
+ error_code = "app_unavailable"
description = "App unavailable, please check your app configurations."
code = 400
class CompletionRequestError(BaseHTTPException):
- error_code = 'completion_request_error'
+ error_code = "completion_request_error"
description = "Completion request failed."
code = 400
class AppMoreLikeThisDisabledError(BaseHTTPException):
- error_code = 'app_more_like_this_disabled'
+ error_code = "app_more_like_this_disabled"
description = "The 'More like this' feature is disabled. Please refresh your page."
code = 403
class NoAudioUploadedError(BaseHTTPException):
- error_code = 'no_audio_uploaded'
+ error_code = "no_audio_uploaded"
description = "Please upload your audio."
code = 400
class AudioTooLargeError(BaseHTTPException):
- error_code = 'audio_too_large'
+ error_code = "audio_too_large"
description = "Audio size exceeded. {message}"
code = 413
class UnsupportedAudioTypeError(BaseHTTPException):
- error_code = 'unsupported_audio_type'
+ error_code = "unsupported_audio_type"
description = "Audio type not allowed."
code = 415
class ProviderNotSupportSpeechToTextError(BaseHTTPException):
- error_code = 'provider_not_support_speech_to_text'
+ error_code = "provider_not_support_speech_to_text"
description = "Provider not support speech to text."
code = 400
class NoFileUploadedError(BaseHTTPException):
- error_code = 'no_file_uploaded'
+ error_code = "no_file_uploaded"
description = "Please upload your file."
code = 400
class TooManyFilesError(BaseHTTPException):
- error_code = 'too_many_files'
+ error_code = "too_many_files"
description = "Only one file is allowed."
code = 400
class DraftWorkflowNotExist(BaseHTTPException):
- error_code = 'draft_workflow_not_exist'
+ error_code = "draft_workflow_not_exist"
description = "Draft workflow need to be initialized."
code = 400
class DraftWorkflowNotSync(BaseHTTPException):
- error_code = 'draft_workflow_not_sync'
+ error_code = "draft_workflow_not_sync"
description = "Workflow graph might have been modified, please refresh and resubmit."
code = 400
class TracingConfigNotExist(BaseHTTPException):
- error_code = 'trace_config_not_exist'
+ error_code = "trace_config_not_exist"
description = "Trace config not exist."
code = 400
class TracingConfigIsExist(BaseHTTPException):
- error_code = 'trace_config_is_exist'
+ error_code = "trace_config_is_exist"
description = "Trace config is exist."
code = 400
class TracingConfigCheckError(BaseHTTPException):
- error_code = 'trace_config_check_error'
+ error_code = "trace_config_check_error"
description = "Invalid Credentials."
code = 400
+
+
+class InvokeRateLimitError(BaseHTTPException):
+ """Raised when the Invoke returns rate limit error."""
+
+ error_code = "rate_limit_error"
+ description = "Rate Limit Error"
+ code = 429
diff --git a/api/controllers/console/app/generator.py b/api/controllers/console/app/generator.py
index 6803775e20dfb8..3d1e6b7a37cacd 100644
--- a/api/controllers/console/app/generator.py
+++ b/api/controllers/console/app/generator.py
@@ -24,21 +24,21 @@ class RuleGenerateApi(Resource):
@account_initialization_required
def post(self):
parser = reqparse.RequestParser()
- parser.add_argument('instruction', type=str, required=True, nullable=False, location='json')
- parser.add_argument('model_config', type=dict, required=True, nullable=False, location='json')
- parser.add_argument('no_variable', type=bool, required=True, default=False, location='json')
+ parser.add_argument("instruction", type=str, required=True, nullable=False, location="json")
+ parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json")
+ parser.add_argument("no_variable", type=bool, required=True, default=False, location="json")
args = parser.parse_args()
account = current_user
- PROMPT_GENERATION_MAX_TOKENS = int(os.getenv('PROMPT_GENERATION_MAX_TOKENS', '512'))
+ PROMPT_GENERATION_MAX_TOKENS = int(os.getenv("PROMPT_GENERATION_MAX_TOKENS", "512"))
try:
rules = LLMGenerator.generate_rule_config(
tenant_id=account.current_tenant_id,
- instruction=args['instruction'],
- model_config=args['model_config'],
- no_variable=args['no_variable'],
- rule_config_max_tokens=PROMPT_GENERATION_MAX_TOKENS
+ instruction=args["instruction"],
+ model_config=args["model_config"],
+ no_variable=args["no_variable"],
+ rule_config_max_tokens=PROMPT_GENERATION_MAX_TOKENS,
)
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
@@ -52,4 +52,4 @@ def post(self):
return rules
-api.add_resource(RuleGenerateApi, '/rule-generate')
+api.add_resource(RuleGenerateApi, "/rule-generate")
diff --git a/api/controllers/console/app/message.py b/api/controllers/console/app/message.py
index 636c071795940b..fe06201982374a 100644
--- a/api/controllers/console/app/message.py
+++ b/api/controllers/console/app/message.py
@@ -33,9 +33,9 @@
class ChatMessageListApi(Resource):
message_infinite_scroll_pagination_fields = {
- 'limit': fields.Integer,
- 'has_more': fields.Boolean,
- 'data': fields.List(fields.Nested(message_detail_fields))
+ "limit": fields.Integer,
+ "has_more": fields.Boolean,
+ "data": fields.List(fields.Nested(message_detail_fields)),
}
@setup_required
@@ -45,55 +45,69 @@ class ChatMessageListApi(Resource):
@marshal_with(message_infinite_scroll_pagination_fields)
def get(self, app_model):
parser = reqparse.RequestParser()
- parser.add_argument('conversation_id', required=True, type=uuid_value, location='args')
- parser.add_argument('first_id', type=uuid_value, location='args')
- parser.add_argument('limit', type=int_range(1, 100), required=False, default=20, location='args')
+ parser.add_argument("conversation_id", required=True, type=uuid_value, location="args")
+ parser.add_argument("first_id", type=uuid_value, location="args")
+ parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
args = parser.parse_args()
- conversation = db.session.query(Conversation).filter(
- Conversation.id == args['conversation_id'],
- Conversation.app_id == app_model.id
- ).first()
+ conversation = (
+ db.session.query(Conversation)
+ .filter(Conversation.id == args["conversation_id"], Conversation.app_id == app_model.id)
+ .first()
+ )
if not conversation:
raise NotFound("Conversation Not Exists.")
- if args['first_id']:
- first_message = db.session.query(Message) \
- .filter(Message.conversation_id == conversation.id, Message.id == args['first_id']).first()
+ if args["first_id"]:
+ first_message = (
+ db.session.query(Message)
+ .filter(Message.conversation_id == conversation.id, Message.id == args["first_id"])
+ .first()
+ )
if not first_message:
raise NotFound("First message not found")
- history_messages = db.session.query(Message).filter(
- Message.conversation_id == conversation.id,
- Message.created_at < first_message.created_at,
- Message.id != first_message.id
- ) \
- .order_by(Message.created_at.desc()).limit(args['limit']).all()
+ history_messages = (
+ db.session.query(Message)
+ .filter(
+ Message.conversation_id == conversation.id,
+ Message.created_at < first_message.created_at,
+ Message.id != first_message.id,
+ )
+ .order_by(Message.created_at.desc())
+ .limit(args["limit"])
+ .all()
+ )
else:
- history_messages = db.session.query(Message).filter(Message.conversation_id == conversation.id) \
- .order_by(Message.created_at.desc()).limit(args['limit']).all()
+ history_messages = (
+ db.session.query(Message)
+ .filter(Message.conversation_id == conversation.id)
+ .order_by(Message.created_at.desc())
+ .limit(args["limit"])
+ .all()
+ )
has_more = False
- if len(history_messages) == args['limit']:
+ if len(history_messages) == args["limit"]:
current_page_first_message = history_messages[-1]
- rest_count = db.session.query(Message).filter(
- Message.conversation_id == conversation.id,
- Message.created_at < current_page_first_message.created_at,
- Message.id != current_page_first_message.id
- ).count()
+ rest_count = (
+ db.session.query(Message)
+ .filter(
+ Message.conversation_id == conversation.id,
+ Message.created_at < current_page_first_message.created_at,
+ Message.id != current_page_first_message.id,
+ )
+ .count()
+ )
if rest_count > 0:
has_more = True
history_messages = list(reversed(history_messages))
- return InfiniteScrollPagination(
- data=history_messages,
- limit=args['limit'],
- has_more=has_more
- )
+ return InfiniteScrollPagination(data=history_messages, limit=args["limit"], has_more=has_more)
class MessageFeedbackApi(Resource):
@@ -103,61 +117,57 @@ class MessageFeedbackApi(Resource):
@get_app_model
def post(self, app_model):
parser = reqparse.RequestParser()
- parser.add_argument('message_id', required=True, type=uuid_value, location='json')
- parser.add_argument('rating', type=str, choices=['like', 'dislike', None], location='json')
+ parser.add_argument("message_id", required=True, type=uuid_value, location="json")
+ parser.add_argument("rating", type=str, choices=["like", "dislike", None], location="json")
args = parser.parse_args()
- message_id = str(args['message_id'])
+ message_id = str(args["message_id"])
- message = db.session.query(Message).filter(
- Message.id == message_id,
- Message.app_id == app_model.id
- ).first()
+ message = db.session.query(Message).filter(Message.id == message_id, Message.app_id == app_model.id).first()
if not message:
raise NotFound("Message Not Exists.")
feedback = message.admin_feedback
- if not args['rating'] and feedback:
+ if not args["rating"] and feedback:
db.session.delete(feedback)
- elif args['rating'] and feedback:
- feedback.rating = args['rating']
- elif not args['rating'] and not feedback:
- raise ValueError('rating cannot be None when feedback not exists')
+ elif args["rating"] and feedback:
+ feedback.rating = args["rating"]
+ elif not args["rating"] and not feedback:
+ raise ValueError("rating cannot be None when feedback not exists")
else:
feedback = MessageFeedback(
app_id=app_model.id,
conversation_id=message.conversation_id,
message_id=message.id,
- rating=args['rating'],
- from_source='admin',
- from_account_id=current_user.id
+ rating=args["rating"],
+ from_source="admin",
+ from_account_id=current_user.id,
)
db.session.add(feedback)
db.session.commit()
- return {'result': 'success'}
+ return {"result": "success"}
class MessageAnnotationApi(Resource):
@setup_required
@login_required
@account_initialization_required
- @cloud_edition_billing_resource_check('annotation')
+ @cloud_edition_billing_resource_check("annotation")
@get_app_model
@marshal_with(annotation_fields)
def post(self, app_model):
- # The role of the current user in the ta table must be admin or owner
- if not current_user.is_admin_or_owner:
+ if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
- parser.add_argument('message_id', required=False, type=uuid_value, location='json')
- parser.add_argument('question', required=True, type=str, location='json')
- parser.add_argument('answer', required=True, type=str, location='json')
- parser.add_argument('annotation_reply', required=False, type=dict, location='json')
+ parser.add_argument("message_id", required=False, type=uuid_value, location="json")
+ parser.add_argument("question", required=True, type=str, location="json")
+ parser.add_argument("answer", required=True, type=str, location="json")
+ parser.add_argument("annotation_reply", required=False, type=dict, location="json")
args = parser.parse_args()
annotation = AppAnnotationService.up_insert_app_annotation_from_message(args, app_model.id)
@@ -170,11 +180,9 @@ class MessageAnnotationCountApi(Resource):
@account_initialization_required
@get_app_model
def get(self, app_model):
- count = db.session.query(MessageAnnotation).filter(
- MessageAnnotation.app_id == app_model.id
- ).count()
+ count = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app_model.id).count()
- return {'count': count}
+ return {"count": count}
class MessageSuggestedQuestionApi(Resource):
@@ -187,10 +195,7 @@ def get(self, app_model, message_id):
try:
questions = MessageService.get_suggested_questions_after_answer(
- app_model=app_model,
- message_id=message_id,
- user=current_user,
- invoke_from=InvokeFrom.DEBUGGER
+ app_model=app_model, message_id=message_id, user=current_user, invoke_from=InvokeFrom.DEBUGGER
)
except MessageNotExistsError:
raise NotFound("Message not found")
@@ -210,7 +215,7 @@ def get(self, app_model, message_id):
logging.exception("internal server error.")
raise InternalServerError()
- return {'data': questions}
+ return {"data": questions}
class MessageApi(Resource):
@@ -222,10 +227,7 @@ class MessageApi(Resource):
def get(self, app_model, message_id):
message_id = str(message_id)
- message = db.session.query(Message).filter(
- Message.id == message_id,
- Message.app_id == app_model.id
- ).first()
+ message = db.session.query(Message).filter(Message.id == message_id, Message.app_id == app_model.id).first()
if not message:
raise NotFound("Message Not Exists.")
@@ -233,9 +235,9 @@ def get(self, app_model, message_id):
return message
-api.add_resource(MessageSuggestedQuestionApi, '/apps/<uuid:app_id>/chat-messages/<uuid:message_id>/suggested-questions')
-api.add_resource(ChatMessageListApi, '/apps/<uuid:app_id>/chat-messages', endpoint='console_chat_messages')
-api.add_resource(MessageFeedbackApi, '/apps/<uuid:app_id>/feedbacks')
-api.add_resource(MessageAnnotationApi, '/apps/<uuid:app_id>/annotations')
-api.add_resource(MessageAnnotationCountApi, '/apps/<uuid:app_id>/annotations/count')
-api.add_resource(MessageApi, '/apps/<uuid:app_id>/messages/<uuid:message_id>', endpoint='console_message')
+api.add_resource(MessageSuggestedQuestionApi, "/apps/<uuid:app_id>/chat-messages/<uuid:message_id>/suggested-questions")
+api.add_resource(ChatMessageListApi, "/apps/<uuid:app_id>/chat-messages", endpoint="console_chat_messages")
+api.add_resource(MessageFeedbackApi, "/apps/<uuid:app_id>/feedbacks")
+api.add_resource(MessageAnnotationApi, "/apps/<uuid:app_id>/annotations")
+api.add_resource(MessageAnnotationCountApi, "/apps/<uuid:app_id>/annotations/count")
+api.add_resource(MessageApi, "/apps/<uuid:app_id>/messages/<uuid:message_id>", endpoint="console_message")
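
Note: a hypothetical client-side sketch of the first_id/limit cursor used by ChatMessageListApi above. Each page returns up to limit messages older than first_id (oldest first, since the server reverses the descending query), plus a has_more flag. The base URL, console API prefix, app id and auth header are placeholder assumptions, not values taken from the diff.

import requests


def fetch_all_messages(base_url, app_id, conversation_id, token, limit=20):
    messages, first_id = [], None
    while True:
        params = {"conversation_id": conversation_id, "limit": limit}
        if first_id:
            params["first_id"] = first_id
        resp = requests.get(
            f"{base_url}/console/api/apps/{app_id}/chat-messages",
            params=params,
            headers={"Authorization": f"Bearer {token}"},
        )
        resp.raise_for_status()
        page = resp.json()
        # Pages arrive oldest-to-newest, so older pages are prepended to keep global order.
        messages = page["data"] + messages
        if not page["has_more"]:
            return messages
        # The oldest message of this page becomes the cursor for the next (older) page.
        first_id = page["data"][0]["id"]
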
diff --git a/api/controllers/console/app/model_config.py b/api/controllers/console/app/model_config.py
index c8df879a29ca42..f5068a4cd8fcab 100644
--- a/api/controllers/console/app/model_config.py
+++ b/api/controllers/console/app/model_config.py
@@ -19,37 +19,35 @@
class ModelConfigResource(Resource):
-
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.AGENT_CHAT, AppMode.CHAT, AppMode.COMPLETION])
def post(self, app_model):
-
"""Modify app model config"""
# validate config
model_configuration = AppModelConfigService.validate_configuration(
- tenant_id=current_user.current_tenant_id,
- config=request.json,
- app_mode=AppMode.value_of(app_model.mode)
+ tenant_id=current_user.current_tenant_id, config=request.json, app_mode=AppMode.value_of(app_model.mode)
)
new_app_model_config = AppModelConfig(
app_id=app_model.id,
+ created_by=current_user.id,
+ updated_by=current_user.id,
)
new_app_model_config = new_app_model_config.from_model_config_dict(model_configuration)
if app_model.mode == AppMode.AGENT_CHAT.value or app_model.is_agent:
# get original app model config
- original_app_model_config: AppModelConfig = db.session.query(AppModelConfig).filter(
- AppModelConfig.id == app_model.app_model_config_id
- ).first()
+ original_app_model_config: AppModelConfig = (
+ db.session.query(AppModelConfig).filter(AppModelConfig.id == app_model.app_model_config_id).first()
+ )
agent_mode = original_app_model_config.agent_mode_dict
# decrypt agent tool parameters if it's secret-input
parameter_map = {}
masked_parameter_map = {}
tool_map = {}
- for tool in agent_mode.get('tools') or []:
+ for tool in agent_mode.get("tools") or []:
if not isinstance(tool, dict) or len(tool.keys()) <= 3:
continue
@@ -66,7 +64,7 @@ def post(self, app_model):
tool_runtime=tool_runtime,
provider_name=agent_tool_entity.provider_id,
provider_type=agent_tool_entity.provider_type,
- identity_id=f'AGENT.{app_model.id}'
+ identity_id=f"AGENT.{app_model.id}",
)
except Exception as e:
continue
@@ -79,18 +77,18 @@ def post(self, app_model):
parameters = {}
masked_parameter = {}
- key = f'{agent_tool_entity.provider_id}.{agent_tool_entity.provider_type}.{agent_tool_entity.tool_name}'
+ key = f"{agent_tool_entity.provider_id}.{agent_tool_entity.provider_type}.{agent_tool_entity.tool_name}"
masked_parameter_map[key] = masked_parameter
parameter_map[key] = parameters
tool_map[key] = tool_runtime
# encrypt agent tool parameters if it's secret-input
agent_mode = new_app_model_config.agent_mode_dict
- for tool in agent_mode.get('tools') or []:
+ for tool in agent_mode.get("tools") or []:
agent_tool_entity = AgentToolEntity(**tool)
# get tool
- key = f'{agent_tool_entity.provider_id}.{agent_tool_entity.provider_type}.{agent_tool_entity.tool_name}'
+ key = f"{agent_tool_entity.provider_id}.{agent_tool_entity.provider_type}.{agent_tool_entity.tool_name}"
if key in tool_map:
tool_runtime = tool_map[key]
else:
@@ -108,7 +106,7 @@ def post(self, app_model):
tool_runtime=tool_runtime,
provider_name=agent_tool_entity.provider_id,
provider_type=agent_tool_entity.provider_type,
- identity_id=f'AGENT.{app_model.id}'
+ identity_id=f"AGENT.{app_model.id}",
)
manager.delete_tool_parameters_cache()
@@ -116,15 +114,17 @@ def post(self, app_model):
if agent_tool_entity.tool_parameters:
if key not in masked_parameter_map:
continue
-
+
for masked_key, masked_value in masked_parameter_map[key].items():
- if masked_key in agent_tool_entity.tool_parameters and \
- agent_tool_entity.tool_parameters[masked_key] == masked_value:
+ if (
+ masked_key in agent_tool_entity.tool_parameters
+ and agent_tool_entity.tool_parameters[masked_key] == masked_value
+ ):
agent_tool_entity.tool_parameters[masked_key] = parameter_map[key].get(masked_key)
# encrypt parameters
if agent_tool_entity.tool_parameters:
- tool['tool_parameters'] = manager.encrypt_tool_parameters(agent_tool_entity.tool_parameters or {})
+ tool["tool_parameters"] = manager.encrypt_tool_parameters(agent_tool_entity.tool_parameters or {})
# update app model config
new_app_model_config.agent_mode = json.dumps(agent_mode)
@@ -135,12 +135,9 @@ def post(self, app_model):
app_model.app_model_config_id = new_app_model_config.id
db.session.commit()
- app_model_config_was_updated.send(
- app_model,
- app_model_config=new_app_model_config
- )
+ app_model_config_was_updated.send(app_model, app_model_config=new_app_model_config)
- return {'result': 'success'}
+ return {"result": "success"}
-api.add_resource(ModelConfigResource, '/apps/<uuid:app_id>/model-config')
+api.add_resource(ModelConfigResource, "/apps/<uuid:app_id>/model-config")
diff --git a/api/controllers/console/app/ops_trace.py b/api/controllers/console/app/ops_trace.py
index c0cf7b9e33f32b..374bd2b8157027 100644
--- a/api/controllers/console/app/ops_trace.py
+++ b/api/controllers/console/app/ops_trace.py
@@ -18,13 +18,11 @@ class TraceAppConfigApi(Resource):
@account_initialization_required
def get(self, app_id):
parser = reqparse.RequestParser()
- parser.add_argument('tracing_provider', type=str, required=True, location='args')
+ parser.add_argument("tracing_provider", type=str, required=True, location="args")
args = parser.parse_args()
try:
- trace_config = OpsService.get_tracing_app_config(
- app_id=app_id, tracing_provider=args['tracing_provider']
- )
+ trace_config = OpsService.get_tracing_app_config(app_id=app_id, tracing_provider=args["tracing_provider"])
if not trace_config:
return {"has_not_configured": True}
return trace_config
@@ -37,19 +35,17 @@ def get(self, app_id):
def post(self, app_id):
"""Create a new trace app configuration"""
parser = reqparse.RequestParser()
- parser.add_argument('tracing_provider', type=str, required=True, location='json')
- parser.add_argument('tracing_config', type=dict, required=True, location='json')
+ parser.add_argument("tracing_provider", type=str, required=True, location="json")
+ parser.add_argument("tracing_config", type=dict, required=True, location="json")
args = parser.parse_args()
try:
result = OpsService.create_tracing_app_config(
- app_id=app_id,
- tracing_provider=args['tracing_provider'],
- tracing_config=args['tracing_config']
+ app_id=app_id, tracing_provider=args["tracing_provider"], tracing_config=args["tracing_config"]
)
if not result:
raise TracingConfigIsExist()
- if result.get('error'):
+ if result.get("error"):
raise TracingConfigCheckError()
return result
except Exception as e:
@@ -61,15 +57,13 @@ def post(self, app_id):
def patch(self, app_id):
"""Update an existing trace app configuration"""
parser = reqparse.RequestParser()
- parser.add_argument('tracing_provider', type=str, required=True, location='json')
- parser.add_argument('tracing_config', type=dict, required=True, location='json')
+ parser.add_argument("tracing_provider", type=str, required=True, location="json")
+ parser.add_argument("tracing_config", type=dict, required=True, location="json")
args = parser.parse_args()
try:
result = OpsService.update_tracing_app_config(
- app_id=app_id,
- tracing_provider=args['tracing_provider'],
- tracing_config=args['tracing_config']
+ app_id=app_id, tracing_provider=args["tracing_provider"], tracing_config=args["tracing_config"]
)
if not result:
raise TracingConfigNotExist()
@@ -83,14 +77,11 @@ def patch(self, app_id):
def delete(self, app_id):
"""Delete an existing trace app configuration"""
parser = reqparse.RequestParser()
- parser.add_argument('tracing_provider', type=str, required=True, location='args')
+ parser.add_argument("tracing_provider", type=str, required=True, location="args")
args = parser.parse_args()
try:
- result = OpsService.delete_tracing_app_config(
- app_id=app_id,
- tracing_provider=args['tracing_provider']
- )
+ result = OpsService.delete_tracing_app_config(app_id=app_id, tracing_provider=args["tracing_provider"])
if not result:
raise TracingConfigNotExist()
return {"result": "success"}
@@ -98,4 +89,4 @@ def delete(self, app_id):
raise e
-api.add_resource(TraceAppConfigApi, '/apps/<uuid:app_id>/trace-config')
+api.add_resource(TraceAppConfigApi, "/apps/<uuid:app_id>/trace-config")
diff --git a/api/controllers/console/app/site.py b/api/controllers/console/app/site.py
index 6aa9f0b475f161..f936642acd1d4e 100644
--- a/api/controllers/console/app/site.py
+++ b/api/controllers/console/app/site.py
@@ -1,3 +1,5 @@
+from datetime import datetime, timezone
+
from flask_login import current_user
from flask_restful import Resource, marshal_with, reqparse
from werkzeug.exceptions import Forbidden, NotFound
@@ -15,22 +17,23 @@
def parse_app_site_args():
parser = reqparse.RequestParser()
- parser.add_argument('title', type=str, required=False, location='json')
- parser.add_argument('icon', type=str, required=False, location='json')
- parser.add_argument('icon_background', type=str, required=False, location='json')
- parser.add_argument('description', type=str, required=False, location='json')
- parser.add_argument('default_language', type=supported_language, required=False, location='json')
- parser.add_argument('chat_color_theme', type=str, required=False, location='json')
- parser.add_argument('chat_color_theme_inverted', type=bool, required=False, location='json')
- parser.add_argument('customize_domain', type=str, required=False, location='json')
- parser.add_argument('copyright', type=str, required=False, location='json')
- parser.add_argument('privacy_policy', type=str, required=False, location='json')
- parser.add_argument('custom_disclaimer', type=str, required=False, location='json')
- parser.add_argument('customize_token_strategy', type=str, choices=['must', 'allow', 'not_allow'],
- required=False,
- location='json')
- parser.add_argument('prompt_public', type=bool, required=False, location='json')
- parser.add_argument('show_workflow_steps', type=bool, required=False, location='json')
+ parser.add_argument("title", type=str, required=False, location="json")
+ parser.add_argument("icon_type", type=str, required=False, location="json")
+ parser.add_argument("icon", type=str, required=False, location="json")
+ parser.add_argument("icon_background", type=str, required=False, location="json")
+ parser.add_argument("description", type=str, required=False, location="json")
+ parser.add_argument("default_language", type=supported_language, required=False, location="json")
+ parser.add_argument("chat_color_theme", type=str, required=False, location="json")
+ parser.add_argument("chat_color_theme_inverted", type=bool, required=False, location="json")
+ parser.add_argument("customize_domain", type=str, required=False, location="json")
+ parser.add_argument("copyright", type=str, required=False, location="json")
+ parser.add_argument("privacy_policy", type=str, required=False, location="json")
+ parser.add_argument("custom_disclaimer", type=str, required=False, location="json")
+ parser.add_argument(
+ "customize_token_strategy", type=str, choices=["must", "allow", "not_allow"], required=False, location="json"
+ )
+ parser.add_argument("prompt_public", type=bool, required=False, location="json")
+ parser.add_argument("show_workflow_steps", type=bool, required=False, location="json")
return parser.parse_args()
@@ -47,37 +50,37 @@ def post(self, app_model):
if not current_user.is_editor:
raise Forbidden()
- site = db.session.query(Site). \
- filter(Site.app_id == app_model.id). \
- one_or_404()
+ site = db.session.query(Site).filter(Site.app_id == app_model.id).one_or_404()
for attr_name in [
- 'title',
- 'icon',
- 'icon_background',
- 'description',
- 'default_language',
- 'chat_color_theme',
- 'chat_color_theme_inverted',
- 'customize_domain',
- 'copyright',
- 'privacy_policy',
- 'custom_disclaimer',
- 'customize_token_strategy',
- 'prompt_public',
- 'show_workflow_steps'
+ "title",
+ "icon_type",
+ "icon",
+ "icon_background",
+ "description",
+ "default_language",
+ "chat_color_theme",
+ "chat_color_theme_inverted",
+ "customize_domain",
+ "copyright",
+ "privacy_policy",
+ "custom_disclaimer",
+ "customize_token_strategy",
+ "prompt_public",
+ "show_workflow_steps",
]:
value = args.get(attr_name)
if value is not None:
setattr(site, attr_name, value)
+ site.updated_by = current_user.id
+ site.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
db.session.commit()
return site
class AppSiteAccessTokenReset(Resource):
-
@setup_required
@login_required
@account_initialization_required
@@ -94,10 +97,12 @@ def post(self, app_model):
raise NotFound
site.code = Site.generate_code(16)
+ site.updated_by = current_user.id
+ site.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
db.session.commit()
return site
-api.add_resource(AppSite, '/apps/<uuid:app_id>/site')
-api.add_resource(AppSiteAccessTokenReset, '/apps/<uuid:app_id>/site/access-token-reset')
+api.add_resource(AppSite, "/apps/<uuid:app_id>/site")
+api.add_resource(AppSiteAccessTokenReset, "/apps/<uuid:app_id>/site/access-token-reset")
diff --git a/api/controllers/console/app/statistic.py b/api/controllers/console/app/statistic.py
index b882ffef34129e..81826a20d05544 100644
--- a/api/controllers/console/app/statistic.py
+++ b/api/controllers/console/app/statistic.py
@@ -16,8 +16,61 @@
from models.model import AppMode
-class DailyConversationStatistic(Resource):
+class DailyMessageStatistic(Resource):
+ @setup_required
+ @login_required
+ @account_initialization_required
+ @get_app_model
+ def get(self, app_model):
+ account = current_user
+
+ parser = reqparse.RequestParser()
+ parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ args = parser.parse_args()
+
+ sql_query = """
+ SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, count(*) AS message_count
+ FROM messages where app_id = :app_id
+ """
+ arg_dict = {"tz": account.timezone, "app_id": app_model.id}
+
+ timezone = pytz.timezone(account.timezone)
+ utc_timezone = pytz.utc
+
+ if args["start"]:
+ start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
+ start_datetime = start_datetime.replace(second=0)
+
+ start_datetime_timezone = timezone.localize(start_datetime)
+ start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
+
+ sql_query += " and created_at >= :start"
+ arg_dict["start"] = start_datetime_utc
+
+ if args["end"]:
+ end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
+ end_datetime = end_datetime.replace(second=0)
+
+ end_datetime_timezone = timezone.localize(end_datetime)
+ end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
+
+ sql_query += " and created_at < :end"
+ arg_dict["end"] = end_datetime_utc
+
+ sql_query += " GROUP BY date order by date"
+
+ response_data = []
+
+ with db.engine.begin() as conn:
+ rs = conn.execute(db.text(sql_query), arg_dict)
+ for i in rs:
+ response_data.append({"date": str(i.date), "message_count": i.message_count})
+
+ return jsonify({"data": response_data})
+
+class DailyConversationStatistic(Resource):
@setup_required
@login_required
@account_initialization_required
@@ -26,58 +79,52 @@ def get(self, app_model):
account = current_user
parser = reqparse.RequestParser()
- parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
- parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
+ parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
- sql_query = '''
+ sql_query = """
SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, count(distinct messages.conversation_id) AS conversation_count
FROM messages where app_id = :app_id
- '''
- arg_dict = {'tz': account.timezone, 'app_id': app_model.id}
+ """
+ arg_dict = {"tz": account.timezone, "app_id": app_model.id}
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
- if args['start']:
- start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
+ if args["start"]:
+ start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and created_at >= :start'
- arg_dict['start'] = start_datetime_utc
+ sql_query += " and created_at >= :start"
+ arg_dict["start"] = start_datetime_utc
- if args['end']:
- end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
+ if args["end"]:
+ end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and created_at < :end'
- arg_dict['end'] = end_datetime_utc
+ sql_query += " and created_at < :end"
+ arg_dict["end"] = end_datetime_utc
- sql_query += ' GROUP BY date order by date'
+ sql_query += " GROUP BY date order by date"
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
for i in rs:
- response_data.append({
- 'date': str(i.date),
- 'conversation_count': i.conversation_count
- })
+ response_data.append({"date": str(i.date), "conversation_count": i.conversation_count})
- return jsonify({
- 'data': response_data
- })
+ return jsonify({"data": response_data})
class DailyTerminalsStatistic(Resource):
-
@setup_required
@login_required
@account_initialization_required
@@ -86,54 +133,49 @@ def get(self, app_model):
account = current_user
parser = reqparse.RequestParser()
- parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
- parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
+ parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
- sql_query = '''
+ sql_query = """
SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, count(distinct messages.from_end_user_id) AS terminal_count
FROM messages where app_id = :app_id
- '''
- arg_dict = {'tz': account.timezone, 'app_id': app_model.id}
+ """
+ arg_dict = {"tz": account.timezone, "app_id": app_model.id}
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
- if args['start']:
- start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
+ if args["start"]:
+ start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and created_at >= :start'
- arg_dict['start'] = start_datetime_utc
+ sql_query += " and created_at >= :start"
+ arg_dict["start"] = start_datetime_utc
- if args['end']:
- end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
+ if args["end"]:
+ end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and created_at < :end'
- arg_dict['end'] = end_datetime_utc
+ sql_query += " and created_at < :end"
+ arg_dict["end"] = end_datetime_utc
- sql_query += ' GROUP BY date order by date'
+ sql_query += " GROUP BY date order by date"
response_data = []
with db.engine.begin() as conn:
- rs = conn.execute(db.text(sql_query), arg_dict)
+            rs = conn.execute(db.text(sql_query), arg_dict)
for i in rs:
- response_data.append({
- 'date': str(i.date),
- 'terminal_count': i.terminal_count
- })
+ response_data.append({"date": str(i.date), "terminal_count": i.terminal_count})
- return jsonify({
- 'data': response_data
- })
+ return jsonify({"data": response_data})
class DailyTokenCostStatistic(Resource):
@@ -145,58 +187,53 @@ def get(self, app_model):
account = current_user
parser = reqparse.RequestParser()
- parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
- parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
+ parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
- sql_query = '''
+ sql_query = """
SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
(sum(messages.message_tokens) + sum(messages.answer_tokens)) as token_count,
sum(total_price) as total_price
FROM messages where app_id = :app_id
- '''
- arg_dict = {'tz': account.timezone, 'app_id': app_model.id}
+ """
+ arg_dict = {"tz": account.timezone, "app_id": app_model.id}
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
- if args['start']:
- start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
+ if args["start"]:
+ start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and created_at >= :start'
- arg_dict['start'] = start_datetime_utc
+ sql_query += " and created_at >= :start"
+ arg_dict["start"] = start_datetime_utc
- if args['end']:
- end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
+ if args["end"]:
+ end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and created_at < :end'
- arg_dict['end'] = end_datetime_utc
+ sql_query += " and created_at < :end"
+ arg_dict["end"] = end_datetime_utc
- sql_query += ' GROUP BY date order by date'
+ sql_query += " GROUP BY date order by date"
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
for i in rs:
- response_data.append({
- 'date': str(i.date),
- 'token_count': i.token_count,
- 'total_price': i.total_price,
- 'currency': 'USD'
- })
+ response_data.append(
+ {"date": str(i.date), "token_count": i.token_count, "total_price": i.total_price, "currency": "USD"}
+ )
- return jsonify({
- 'data': response_data
- })
+ return jsonify({"data": response_data})
class AverageSessionInteractionStatistic(Resource):
@@ -208,8 +245,8 @@ def get(self, app_model):
account = current_user
parser = reqparse.RequestParser()
- parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
- parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
+ parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
sql_query = """SELECT date(DATE_TRUNC('day', c.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
@@ -218,30 +255,30 @@ def get(self, app_model):
FROM conversations c
JOIN messages m ON c.id = m.conversation_id
WHERE c.override_model_configs IS NULL AND c.app_id = :app_id"""
- arg_dict = {'tz': account.timezone, 'app_id': app_model.id}
+ arg_dict = {"tz": account.timezone, "app_id": app_model.id}
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
- if args['start']:
- start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
+ if args["start"]:
+ start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and c.created_at >= :start'
- arg_dict['start'] = start_datetime_utc
+ sql_query += " and c.created_at >= :start"
+ arg_dict["start"] = start_datetime_utc
- if args['end']:
- end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
+ if args["end"]:
+ end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and c.created_at < :end'
- arg_dict['end'] = end_datetime_utc
+ sql_query += " and c.created_at < :end"
+ arg_dict["end"] = end_datetime_utc
sql_query += """
GROUP BY m.conversation_id) subquery
@@ -250,18 +287,15 @@ def get(self, app_model):
ORDER BY date"""
response_data = []
-
+
with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
for i in rs:
- response_data.append({
- 'date': str(i.date),
- 'interactions': float(i.interactions.quantize(Decimal('0.01')))
- })
+ response_data.append(
+ {"date": str(i.date), "interactions": float(i.interactions.quantize(Decimal("0.01")))}
+ )
- return jsonify({
- 'data': response_data
- })
+ return jsonify({"data": response_data})
class UserSatisfactionRateStatistic(Resource):
@@ -273,57 +307,57 @@ def get(self, app_model):
account = current_user
parser = reqparse.RequestParser()
- parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
- parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
+ parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
- sql_query = '''
+ sql_query = """
SELECT date(DATE_TRUNC('day', m.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
COUNT(m.id) as message_count, COUNT(mf.id) as feedback_count
FROM messages m
LEFT JOIN message_feedbacks mf on mf.message_id=m.id and mf.rating='like'
WHERE m.app_id = :app_id
- '''
- arg_dict = {'tz': account.timezone, 'app_id': app_model.id}
+ """
+ arg_dict = {"tz": account.timezone, "app_id": app_model.id}
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
- if args['start']:
- start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
+ if args["start"]:
+ start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and m.created_at >= :start'
- arg_dict['start'] = start_datetime_utc
+ sql_query += " and m.created_at >= :start"
+ arg_dict["start"] = start_datetime_utc
- if args['end']:
- end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
+ if args["end"]:
+ end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and m.created_at < :end'
- arg_dict['end'] = end_datetime_utc
+ sql_query += " and m.created_at < :end"
+ arg_dict["end"] = end_datetime_utc
- sql_query += ' GROUP BY date order by date'
+ sql_query += " GROUP BY date order by date"
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
for i in rs:
- response_data.append({
- 'date': str(i.date),
- 'rate': round((i.feedback_count * 1000 / i.message_count) if i.message_count > 0 else 0, 2),
- })
+ response_data.append(
+ {
+ "date": str(i.date),
+ "rate": round((i.feedback_count * 1000 / i.message_count) if i.message_count > 0 else 0, 2),
+ }
+ )
- return jsonify({
- 'data': response_data
- })
+ return jsonify({"data": response_data})
class AverageResponseTimeStatistic(Resource):
@@ -335,56 +369,51 @@ def get(self, app_model):
account = current_user
parser = reqparse.RequestParser()
- parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
- parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
+ parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
- sql_query = '''
+ sql_query = """
SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
AVG(provider_response_latency) as latency
FROM messages
WHERE app_id = :app_id
- '''
- arg_dict = {'tz': account.timezone, 'app_id': app_model.id}
+ """
+ arg_dict = {"tz": account.timezone, "app_id": app_model.id}
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
- if args['start']:
- start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
+ if args["start"]:
+ start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and created_at >= :start'
- arg_dict['start'] = start_datetime_utc
+ sql_query += " and created_at >= :start"
+ arg_dict["start"] = start_datetime_utc
- if args['end']:
- end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
+ if args["end"]:
+ end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and created_at < :end'
- arg_dict['end'] = end_datetime_utc
+ sql_query += " and created_at < :end"
+ arg_dict["end"] = end_datetime_utc
- sql_query += ' GROUP BY date order by date'
+ sql_query += " GROUP BY date order by date"
response_data = []
with db.engine.begin() as conn:
- rs = conn.execute(db.text(sql_query), arg_dict)
+            rs = conn.execute(db.text(sql_query), arg_dict)
for i in rs:
- response_data.append({
- 'date': str(i.date),
- 'latency': round(i.latency * 1000, 4)
- })
+ response_data.append({"date": str(i.date), "latency": round(i.latency * 1000, 4)})
- return jsonify({
- 'data': response_data
- })
+ return jsonify({"data": response_data})
class TokensPerSecondStatistic(Resource):
@@ -396,63 +425,59 @@ def get(self, app_model):
account = current_user
parser = reqparse.RequestParser()
- parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
- parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
+ parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
- sql_query = '''SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
+ sql_query = """SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
CASE
WHEN SUM(provider_response_latency) = 0 THEN 0
ELSE (SUM(answer_tokens) / SUM(provider_response_latency))
END as tokens_per_second
FROM messages
-WHERE app_id = :app_id'''
- arg_dict = {'tz': account.timezone, 'app_id': app_model.id}
+WHERE app_id = :app_id"""
+ arg_dict = {"tz": account.timezone, "app_id": app_model.id}
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
- if args['start']:
- start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
+ if args["start"]:
+ start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and created_at >= :start'
- arg_dict['start'] = start_datetime_utc
+ sql_query += " and created_at >= :start"
+ arg_dict["start"] = start_datetime_utc
- if args['end']:
- end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
+ if args["end"]:
+ end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and created_at < :end'
- arg_dict['end'] = end_datetime_utc
+ sql_query += " and created_at < :end"
+ arg_dict["end"] = end_datetime_utc
- sql_query += ' GROUP BY date order by date'
+ sql_query += " GROUP BY date order by date"
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
for i in rs:
- response_data.append({
- 'date': str(i.date),
- 'tps': round(i.tokens_per_second, 4)
- })
-
- return jsonify({
- 'data': response_data
- })
-
-
-api.add_resource(DailyConversationStatistic, '/apps/<uuid:app_id>/statistics/daily-conversations')
-api.add_resource(DailyTerminalsStatistic, '/apps/<uuid:app_id>/statistics/daily-end-users')
-api.add_resource(DailyTokenCostStatistic, '/apps/<uuid:app_id>/statistics/token-costs')
-api.add_resource(AverageSessionInteractionStatistic, '/apps/<uuid:app_id>/statistics/average-session-interactions')
-api.add_resource(UserSatisfactionRateStatistic, '/apps/<uuid:app_id>/statistics/user-satisfaction-rate')
-api.add_resource(AverageResponseTimeStatistic, '/apps/<uuid:app_id>/statistics/average-response-time')
-api.add_resource(TokensPerSecondStatistic, '/apps/<uuid:app_id>/statistics/tokens-per-second')
+ response_data.append({"date": str(i.date), "tps": round(i.tokens_per_second, 4)})
+
+ return jsonify({"data": response_data})
+
+
+api.add_resource(DailyMessageStatistic, "/apps/<uuid:app_id>/statistics/daily-messages")
+api.add_resource(DailyConversationStatistic, "/apps/<uuid:app_id>/statistics/daily-conversations")
+api.add_resource(DailyTerminalsStatistic, "/apps/<uuid:app_id>/statistics/daily-end-users")
+api.add_resource(DailyTokenCostStatistic, "/apps/<uuid:app_id>/statistics/token-costs")
+api.add_resource(AverageSessionInteractionStatistic, "/apps/<uuid:app_id>/statistics/average-session-interactions")
+api.add_resource(UserSatisfactionRateStatistic, "/apps/<uuid:app_id>/statistics/user-satisfaction-rate")
+api.add_resource(AverageResponseTimeStatistic, "/apps/<uuid:app_id>/statistics/average-response-time")
+api.add_resource(TokensPerSecondStatistic, "/apps/<uuid:app_id>/statistics/tokens-per-second")
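
Note: a small, self-contained sketch of the start/end handling that every statistics endpoint above repeats: the naive "YYYY-MM-DD HH:MM" string, interpreted in the account's timezone, is localized and then converted to UTC before being bound into the SQL filter. The function name is illustrative; the conversion steps mirror the code in the diff.

from datetime import datetime

import pytz


def to_utc(naive_str: str, account_tz: str) -> datetime:
    tz = pytz.timezone(account_tz)
    naive = datetime.strptime(naive_str, "%Y-%m-%d %H:%M").replace(second=0)
    # Attach the user's timezone, then convert to UTC to match the stored created_at values.
    return tz.localize(naive).astimezone(pytz.utc)


# Example: to_utc("2024-08-01 08:00", "Asia/Shanghai") -> 2024-08-01 00:00+00:00
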
diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py
index 686ef7b4bebaaa..e44820f6345c48 100644
--- a/api/controllers/console/app/workflow.py
+++ b/api/controllers/console/app/workflow.py
@@ -64,49 +64,54 @@ def post(self, app_model: App):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
-
- content_type = request.headers.get('Content-Type', '')
- if 'application/json' in content_type:
+ content_type = request.headers.get("Content-Type", "")
+
+ if "application/json" in content_type:
parser = reqparse.RequestParser()
- parser.add_argument('graph', type=dict, required=True, nullable=False, location='json')
- parser.add_argument('features', type=dict, required=True, nullable=False, location='json')
- parser.add_argument('hash', type=str, required=False, location='json')
+ parser.add_argument("graph", type=dict, required=True, nullable=False, location="json")
+ parser.add_argument("features", type=dict, required=True, nullable=False, location="json")
+ parser.add_argument("hash", type=str, required=False, location="json")
# TODO: set this to required=True after frontend is updated
- parser.add_argument('environment_variables', type=list, required=False, location='json')
+ parser.add_argument("environment_variables", type=list, required=False, location="json")
+ parser.add_argument("conversation_variables", type=list, required=False, location="json")
args = parser.parse_args()
- elif 'text/plain' in content_type:
+ elif "text/plain" in content_type:
try:
- data = json.loads(request.data.decode('utf-8'))
- if 'graph' not in data or 'features' not in data:
- raise ValueError('graph or features not found in data')
+ data = json.loads(request.data.decode("utf-8"))
+ if "graph" not in data or "features" not in data:
+ raise ValueError("graph or features not found in data")
- if not isinstance(data.get('graph'), dict) or not isinstance(data.get('features'), dict):
- raise ValueError('graph or features is not a dict')
+ if not isinstance(data.get("graph"), dict) or not isinstance(data.get("features"), dict):
+ raise ValueError("graph or features is not a dict")
args = {
- 'graph': data.get('graph'),
- 'features': data.get('features'),
- 'hash': data.get('hash'),
- 'environment_variables': data.get('environment_variables')
+ "graph": data.get("graph"),
+ "features": data.get("features"),
+ "hash": data.get("hash"),
+ "environment_variables": data.get("environment_variables"),
+ "conversation_variables": data.get("conversation_variables"),
}
except json.JSONDecodeError:
- return {'message': 'Invalid JSON data'}, 400
+ return {"message": "Invalid JSON data"}, 400
else:
abort(415)
workflow_service = WorkflowService()
try:
- environment_variables_list = args.get('environment_variables') or []
+ environment_variables_list = args.get("environment_variables") or []
environment_variables = [factory.build_variable_from_mapping(obj) for obj in environment_variables_list]
+ conversation_variables_list = args.get("conversation_variables") or []
+ conversation_variables = [factory.build_variable_from_mapping(obj) for obj in conversation_variables_list]
workflow = workflow_service.sync_draft_workflow(
app_model=app_model,
- graph=args['graph'],
- features=args['features'],
- unique_hash=args.get('hash'),
+ graph=args["graph"],
+ features=args["features"],
+ unique_hash=args.get("hash"),
account=current_user,
environment_variables=environment_variables,
+ conversation_variables=conversation_variables,
)
except WorkflowHashNotEqualError:
raise DraftWorkflowNotSync()
@@ -114,7 +119,7 @@ def post(self, app_model: App):
return {
"result": "success",
"hash": workflow.unique_hash,
- "updated_at": TimestampField().format(workflow.updated_at or workflow.created_at)
+ "updated_at": TimestampField().format(workflow.updated_at or workflow.created_at),
}
@@ -133,13 +138,11 @@ def post(self, app_model: App):
raise Forbidden()
parser = reqparse.RequestParser()
- parser.add_argument('data', type=str, required=True, nullable=False, location='json')
+ parser.add_argument("data", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
workflow = AppDslService.import_and_overwrite_workflow(
- app_model=app_model,
- data=args['data'],
- account=current_user
+ app_model=app_model, data=args["data"], account=current_user
)
return workflow
@@ -157,21 +160,17 @@ def post(self, app_model: App):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
-
+
parser = reqparse.RequestParser()
- parser.add_argument('inputs', type=dict, location='json')
- parser.add_argument('query', type=str, required=True, location='json', default='')
- parser.add_argument('files', type=list, location='json')
- parser.add_argument('conversation_id', type=uuid_value, location='json')
+ parser.add_argument("inputs", type=dict, location="json")
+ parser.add_argument("query", type=str, required=True, location="json", default="")
+ parser.add_argument("files", type=list, location="json")
+ parser.add_argument("conversation_id", type=uuid_value, location="json")
args = parser.parse_args()
try:
response = AppGenerateService.generate(
- app_model=app_model,
- user=current_user,
- args=args,
- invoke_from=InvokeFrom.DEBUGGER,
- streaming=True
+ app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.DEBUGGER, streaming=True
)
return helper.compact_generate_response(response)
@@ -185,6 +184,7 @@ def post(self, app_model: App):
logging.exception("internal server error.")
raise InternalServerError()
+
class AdvancedChatDraftRunIterationNodeApi(Resource):
@setup_required
@login_required
@@ -197,18 +197,14 @@ def post(self, app_model: App, node_id: str):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
-
+
parser = reqparse.RequestParser()
- parser.add_argument('inputs', type=dict, location='json')
+ parser.add_argument("inputs", type=dict, location="json")
args = parser.parse_args()
try:
response = AppGenerateService.generate_single_iteration(
- app_model=app_model,
- user=current_user,
- node_id=node_id,
- args=args,
- streaming=True
+ app_model=app_model, user=current_user, node_id=node_id, args=args, streaming=True
)
return helper.compact_generate_response(response)
@@ -222,6 +218,7 @@ def post(self, app_model: App, node_id: str):
logging.exception("internal server error.")
raise InternalServerError()
+
class WorkflowDraftRunIterationNodeApi(Resource):
@setup_required
@login_required
@@ -234,18 +231,14 @@ def post(self, app_model: App, node_id: str):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
-
+
parser = reqparse.RequestParser()
- parser.add_argument('inputs', type=dict, location='json')
+ parser.add_argument("inputs", type=dict, location="json")
args = parser.parse_args()
try:
response = AppGenerateService.generate_single_iteration(
- app_model=app_model,
- user=current_user,
- node_id=node_id,
- args=args,
- streaming=True
+ app_model=app_model, user=current_user, node_id=node_id, args=args, streaming=True
)
return helper.compact_generate_response(response)
@@ -259,6 +252,7 @@ def post(self, app_model: App, node_id: str):
logging.exception("internal server error.")
raise InternalServerError()
+
class DraftWorkflowRunApi(Resource):
@setup_required
@login_required
@@ -271,19 +265,15 @@ def post(self, app_model: App):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
-
+
parser = reqparse.RequestParser()
- parser.add_argument('inputs', type=dict, required=True, nullable=False, location='json')
- parser.add_argument('files', type=list, required=False, location='json')
+ parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
+ parser.add_argument("files", type=list, required=False, location="json")
args = parser.parse_args()
try:
response = AppGenerateService.generate(
- app_model=app_model,
- user=current_user,
- args=args,
- invoke_from=InvokeFrom.DEBUGGER,
- streaming=True
+ app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.DEBUGGER, streaming=True
)
return helper.compact_generate_response(response)
@@ -306,12 +296,10 @@ def post(self, app_model: App, task_id: str):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
-
+
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, current_user.id)
- return {
- "result": "success"
- }
+ return {"result": "success"}
class DraftWorkflowNodeRunApi(Resource):
@@ -327,24 +315,20 @@ def post(self, app_model: App, node_id: str):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
-
+
parser = reqparse.RequestParser()
- parser.add_argument('inputs', type=dict, required=True, nullable=False, location='json')
+ parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
args = parser.parse_args()
workflow_service = WorkflowService()
workflow_node_execution = workflow_service.run_draft_workflow_node(
- app_model=app_model,
- node_id=node_id,
- user_inputs=args.get('inputs'),
- account=current_user
+ app_model=app_model, node_id=node_id, user_inputs=args.get("inputs"), account=current_user
)
return workflow_node_execution
class PublishedWorkflowApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
@@ -357,7 +341,7 @@ def get(self, app_model: App):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
-
+
# fetch published workflow by app_model
workflow_service = WorkflowService()
workflow = workflow_service.get_published_workflow(app_model=app_model)
@@ -376,14 +360,11 @@ def post(self, app_model: App):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
-
+
workflow_service = WorkflowService()
workflow = workflow_service.publish_workflow(app_model=app_model, account=current_user)
- return {
- "result": "success",
- "created_at": TimestampField().format(workflow.created_at)
- }
+ return {"result": "success", "created_at": TimestampField().format(workflow.created_at)}
class DefaultBlockConfigsApi(Resource):
@@ -398,7 +379,7 @@ def get(self, app_model: App):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
-
+
# Get default block configs
workflow_service = WorkflowService()
return workflow_service.get_default_block_configs()
@@ -416,24 +397,21 @@ def get(self, app_model: App, block_type: str):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
-
+
parser = reqparse.RequestParser()
- parser.add_argument('q', type=str, location='args')
+ parser.add_argument("q", type=str, location="args")
args = parser.parse_args()
filters = None
- if args.get('q'):
+ if args.get("q"):
try:
- filters = json.loads(args.get('q'))
+ filters = json.loads(args.get("q"))
except json.JSONDecodeError:
- raise ValueError('Invalid filters')
+ raise ValueError("Invalid filters")
# Get default block configs
workflow_service = WorkflowService()
- return workflow_service.get_default_block_config(
- node_type=block_type,
- filters=filters
- )
+ return workflow_service.get_default_block_config(node_type=block_type, filters=filters)
class ConvertToWorkflowApi(Resource):
@@ -450,40 +428,43 @@ def post(self, app_model: App):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
-
+
if request.data:
parser = reqparse.RequestParser()
- parser.add_argument('name', type=str, required=False, nullable=True, location='json')
- parser.add_argument('icon', type=str, required=False, nullable=True, location='json')
- parser.add_argument('icon_background', type=str, required=False, nullable=True, location='json')
+ parser.add_argument("name", type=str, required=False, nullable=True, location="json")
+ parser.add_argument("icon_type", type=str, required=False, nullable=True, location="json")
+ parser.add_argument("icon", type=str, required=False, nullable=True, location="json")
+ parser.add_argument("icon_background", type=str, required=False, nullable=True, location="json")
args = parser.parse_args()
else:
args = {}
# convert to workflow mode
workflow_service = WorkflowService()
- new_app_model = workflow_service.convert_to_workflow(
- app_model=app_model,
- account=current_user,
- args=args
- )
+ new_app_model = workflow_service.convert_to_workflow(app_model=app_model, account=current_user, args=args)
# return app id
return {
- 'new_app_id': new_app_model.id,
+ "new_app_id": new_app_model.id,
}
-api.add_resource(DraftWorkflowApi, '/apps/<uuid:app_id>/workflows/draft')
-api.add_resource(DraftWorkflowImportApi, '/apps/<uuid:app_id>/workflows/draft/import')
-api.add_resource(AdvancedChatDraftWorkflowRunApi, '/apps/<uuid:app_id>/advanced-chat/workflows/draft/run')
-api.add_resource(DraftWorkflowRunApi, '/apps/<uuid:app_id>/workflows/draft/run')
-api.add_resource(WorkflowTaskStopApi, '/apps/<uuid:app_id>/workflow-runs/tasks/<string:task_id>/stop')
-api.add_resource(DraftWorkflowNodeRunApi, '/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/run')
-api.add_resource(AdvancedChatDraftRunIterationNodeApi, '/apps/<uuid:app_id>/advanced-chat/workflows/draft/iteration/nodes/<string:node_id>/run')
-api.add_resource(WorkflowDraftRunIterationNodeApi, '/apps/<uuid:app_id>/workflows/draft/iteration/nodes/<string:node_id>/run')
-api.add_resource(PublishedWorkflowApi, '/apps/<uuid:app_id>/workflows/publish')
-api.add_resource(DefaultBlockConfigsApi, '/apps/<uuid:app_id>/workflows/default-workflow-block-configs')
-api.add_resource(DefaultBlockConfigApi, '/apps/<uuid:app_id>/workflows/default-workflow-block-configs'
- '/<string:block_type>')
-api.add_resource(ConvertToWorkflowApi, '/apps/<uuid:app_id>/convert-to-workflow')
+api.add_resource(DraftWorkflowApi, "/apps/<uuid:app_id>/workflows/draft")
+api.add_resource(DraftWorkflowImportApi, "/apps/<uuid:app_id>/workflows/draft/import")
+api.add_resource(AdvancedChatDraftWorkflowRunApi, "/apps/<uuid:app_id>/advanced-chat/workflows/draft/run")
+api.add_resource(DraftWorkflowRunApi, "/apps/<uuid:app_id>/workflows/draft/run")
+api.add_resource(WorkflowTaskStopApi, "/apps/<uuid:app_id>/workflow-runs/tasks/<string:task_id>/stop")
+api.add_resource(DraftWorkflowNodeRunApi, "/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/run")
+api.add_resource(
+ AdvancedChatDraftRunIterationNodeApi,
+ "/apps/<uuid:app_id>/advanced-chat/workflows/draft/iteration/nodes/<string:node_id>/run",
+)
+api.add_resource(
+ WorkflowDraftRunIterationNodeApi, "/apps/<uuid:app_id>/workflows/draft/iteration/nodes/<string:node_id>/run"
+)
+api.add_resource(PublishedWorkflowApi, "/apps/<uuid:app_id>/workflows/publish")
+api.add_resource(DefaultBlockConfigsApi, "/apps/<uuid:app_id>/workflows/default-workflow-block-configs")
+api.add_resource(
+ DefaultBlockConfigApi, "/apps/<uuid:app_id>/workflows/default-workflow-block-configs" "/<string:block_type>"
+)
+api.add_resource(ConvertToWorkflowApi, "/apps/<uuid:app_id>/convert-to-workflow")
diff --git a/api/controllers/console/app/workflow_app_log.py b/api/controllers/console/app/workflow_app_log.py
index 6d1709ed8e65d9..dc962409cc4e86 100644
--- a/api/controllers/console/app/workflow_app_log.py
+++ b/api/controllers/console/app/workflow_app_log.py
@@ -22,20 +22,19 @@ def get(self, app_model: App):
Get workflow app logs
"""
parser = reqparse.RequestParser()
- parser.add_argument('keyword', type=str, location='args')
- parser.add_argument('status', type=str, choices=['succeeded', 'failed', 'stopped'], location='args')
- parser.add_argument('page', type=int_range(1, 99999), default=1, location='args')
- parser.add_argument('limit', type=int_range(1, 100), default=20, location='args')
+ parser.add_argument("keyword", type=str, location="args")
+ parser.add_argument("status", type=str, choices=["succeeded", "failed", "stopped"], location="args")
+ parser.add_argument("page", type=int_range(1, 99999), default=1, location="args")
+ parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")
args = parser.parse_args()
# get paginate workflow app logs
workflow_app_service = WorkflowAppService()
workflow_app_log_pagination = workflow_app_service.get_paginate_workflow_app_logs(
- app_model=app_model,
- args=args
+ app_model=app_model, args=args
)
return workflow_app_log_pagination
-api.add_resource(WorkflowAppLogApi, '/apps/<uuid:app_id>/workflow-app-logs')
+api.add_resource(WorkflowAppLogApi, "/apps/<uuid:app_id>/workflow-app-logs")
diff --git a/api/controllers/console/app/workflow_run.py b/api/controllers/console/app/workflow_run.py
index 35d982e37ce4e3..a055d03deb7879 100644
--- a/api/controllers/console/app/workflow_run.py
+++ b/api/controllers/console/app/workflow_run.py
@@ -28,15 +28,12 @@ def get(self, app_model: App):
Get advanced chat app workflow run list
"""
parser = reqparse.RequestParser()
- parser.add_argument('last_id', type=uuid_value, location='args')
- parser.add_argument('limit', type=int_range(1, 100), required=False, default=20, location='args')
+ parser.add_argument("last_id", type=uuid_value, location="args")
+ parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
args = parser.parse_args()
workflow_run_service = WorkflowRunService()
- result = workflow_run_service.get_paginate_advanced_chat_workflow_runs(
- app_model=app_model,
- args=args
- )
+ result = workflow_run_service.get_paginate_advanced_chat_workflow_runs(app_model=app_model, args=args)
return result
@@ -52,15 +49,12 @@ def get(self, app_model: App):
Get workflow run list
"""
parser = reqparse.RequestParser()
- parser.add_argument('last_id', type=uuid_value, location='args')
- parser.add_argument('limit', type=int_range(1, 100), required=False, default=20, location='args')
+ parser.add_argument("last_id", type=uuid_value, location="args")
+ parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
args = parser.parse_args()
workflow_run_service = WorkflowRunService()
- result = workflow_run_service.get_paginate_workflow_runs(
- app_model=app_model,
- args=args
- )
+ result = workflow_run_service.get_paginate_workflow_runs(app_model=app_model, args=args)
return result
@@ -98,12 +92,10 @@ def get(self, app_model: App, run_id):
workflow_run_service = WorkflowRunService()
node_executions = workflow_run_service.get_workflow_run_node_executions(app_model=app_model, run_id=run_id)
- return {
- 'data': node_executions
- }
+ return {"data": node_executions}
-api.add_resource(AdvancedChatAppWorkflowRunListApi, '/apps/<uuid:app_id>/advanced-chat/workflow-runs')
-api.add_resource(WorkflowRunListApi, '/apps/<uuid:app_id>/workflow-runs')
-api.add_resource(WorkflowRunDetailApi, '/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>')
-api.add_resource(WorkflowRunNodeExecutionListApi, '/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/node-executions')
+api.add_resource(AdvancedChatAppWorkflowRunListApi, "/apps/<uuid:app_id>/advanced-chat/workflow-runs")
+api.add_resource(WorkflowRunListApi, "/apps/<uuid:app_id>/workflow-runs")
+api.add_resource(WorkflowRunDetailApi, "/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>")
+api.add_resource(WorkflowRunNodeExecutionListApi, "/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/node-executions")
diff --git a/api/controllers/console/app/workflow_statistic.py b/api/controllers/console/app/workflow_statistic.py
index 1d7dc395ff3b18..db2f6835898225 100644
--- a/api/controllers/console/app/workflow_statistic.py
+++ b/api/controllers/console/app/workflow_statistic.py
@@ -26,56 +26,56 @@ def get(self, app_model):
account = current_user
parser = reqparse.RequestParser()
- parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
- parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
+ parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
- sql_query = '''
+ sql_query = """
SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, count(id) AS runs
FROM workflow_runs
WHERE app_id = :app_id
AND triggered_from = :triggered_from
- '''
- arg_dict = {'tz': account.timezone, 'app_id': app_model.id, 'triggered_from': WorkflowRunTriggeredFrom.APP_RUN.value}
+ """
+ arg_dict = {
+ "tz": account.timezone,
+ "app_id": app_model.id,
+ "triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value,
+ }
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
- if args['start']:
- start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
+ if args["start"]:
+ start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and created_at >= :start'
- arg_dict['start'] = start_datetime_utc
+ sql_query += " and created_at >= :start"
+ arg_dict["start"] = start_datetime_utc
- if args['end']:
- end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
+ if args["end"]:
+ end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and created_at < :end'
- arg_dict['end'] = end_datetime_utc
+ sql_query += " and created_at < :end"
+ arg_dict["end"] = end_datetime_utc
- sql_query += ' GROUP BY date order by date'
+ sql_query += " GROUP BY date order by date"
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
for i in rs:
- response_data.append({
- 'date': str(i.date),
- 'runs': i.runs
- })
+ response_data.append({"date": str(i.date), "runs": i.runs})
+
+ return jsonify({"data": response_data})
- return jsonify({
- 'data': response_data
- })
class WorkflowDailyTerminalsStatistic(Resource):
@setup_required
@@ -86,56 +86,56 @@ def get(self, app_model):
account = current_user
parser = reqparse.RequestParser()
- parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
- parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
+ parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
- sql_query = '''
+ sql_query = """
SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, count(distinct workflow_runs.created_by) AS terminal_count
FROM workflow_runs
WHERE app_id = :app_id
AND triggered_from = :triggered_from
- '''
- arg_dict = {'tz': account.timezone, 'app_id': app_model.id, 'triggered_from': WorkflowRunTriggeredFrom.APP_RUN.value}
+ """
+ arg_dict = {
+ "tz": account.timezone,
+ "app_id": app_model.id,
+ "triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value,
+ }
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
- if args['start']:
- start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
+ if args["start"]:
+ start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and created_at >= :start'
- arg_dict['start'] = start_datetime_utc
+ sql_query += " and created_at >= :start"
+ arg_dict["start"] = start_datetime_utc
- if args['end']:
- end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
+ if args["end"]:
+ end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and created_at < :end'
- arg_dict['end'] = end_datetime_utc
+ sql_query += " and created_at < :end"
+ arg_dict["end"] = end_datetime_utc
- sql_query += ' GROUP BY date order by date'
+ sql_query += " GROUP BY date order by date"
response_data = []
with db.engine.begin() as conn:
- rs = conn.execute(db.text(sql_query), arg_dict)
+ rs = conn.execute(db.text(sql_query), arg_dict)
for i in rs:
- response_data.append({
- 'date': str(i.date),
- 'terminal_count': i.terminal_count
- })
+ response_data.append({"date": str(i.date), "terminal_count": i.terminal_count})
+
+ return jsonify({"data": response_data})
- return jsonify({
- 'data': response_data
- })
class WorkflowDailyTokenCostStatistic(Resource):
@setup_required
@@ -146,58 +146,63 @@ def get(self, app_model):
account = current_user
parser = reqparse.RequestParser()
- parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
- parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
+ parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
- sql_query = '''
+ sql_query = """
SELECT
date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
SUM(workflow_runs.total_tokens) as token_count
FROM workflow_runs
WHERE app_id = :app_id
AND triggered_from = :triggered_from
- '''
- arg_dict = {'tz': account.timezone, 'app_id': app_model.id, 'triggered_from': WorkflowRunTriggeredFrom.APP_RUN.value}
+ """
+ arg_dict = {
+ "tz": account.timezone,
+ "app_id": app_model.id,
+ "triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value,
+ }
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
- if args['start']:
- start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
+ if args["start"]:
+ start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and created_at >= :start'
- arg_dict['start'] = start_datetime_utc
+ sql_query += " and created_at >= :start"
+ arg_dict["start"] = start_datetime_utc
- if args['end']:
- end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
+ if args["end"]:
+ end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
- sql_query += ' and created_at < :end'
- arg_dict['end'] = end_datetime_utc
+ sql_query += " and created_at < :end"
+ arg_dict["end"] = end_datetime_utc
- sql_query += ' GROUP BY date order by date'
+ sql_query += " GROUP BY date order by date"
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
for i in rs:
- response_data.append({
- 'date': str(i.date),
- 'token_count': i.token_count,
- })
+ response_data.append(
+ {
+ "date": str(i.date),
+ "token_count": i.token_count,
+ }
+ )
+
+ return jsonify({"data": response_data})
- return jsonify({
- 'data': response_data
- })
class WorkflowAverageAppInteractionStatistic(Resource):
@setup_required
@@ -208,8 +213,8 @@ def get(self, app_model):
account = current_user
parser = reqparse.RequestParser()
- parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
- parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
+ parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+ parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
sql_query = """
@@ -229,50 +234,54 @@ def get(self, app_model):
GROUP BY date, c.created_by) sub
GROUP BY sub.date
"""
- arg_dict = {'tz': account.timezone, 'app_id': app_model.id, 'triggered_from': WorkflowRunTriggeredFrom.APP_RUN.value}
+ arg_dict = {
+ "tz": account.timezone,
+ "app_id": app_model.id,
+ "triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value,
+ }
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
- if args['start']:
- start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
+ if args["start"]:
+ start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
- sql_query = sql_query.replace('{{start}}', ' AND c.created_at >= :start')
- arg_dict['start'] = start_datetime_utc
+ sql_query = sql_query.replace("{{start}}", " AND c.created_at >= :start")
+ arg_dict["start"] = start_datetime_utc
else:
- sql_query = sql_query.replace('{{start}}', '')
+ sql_query = sql_query.replace("{{start}}", "")
- if args['end']:
- end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
+ if args["end"]:
+ end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
- sql_query = sql_query.replace('{{end}}', ' and c.created_at < :end')
- arg_dict['end'] = end_datetime_utc
+ sql_query = sql_query.replace("{{end}}", " and c.created_at < :end")
+ arg_dict["end"] = end_datetime_utc
else:
- sql_query = sql_query.replace('{{end}}', '')
+ sql_query = sql_query.replace("{{end}}", "")
response_data = []
-
+
with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
for i in rs:
- response_data.append({
- 'date': str(i.date),
- 'interactions': float(i.interactions.quantize(Decimal('0.01')))
- })
-
- return jsonify({
- 'data': response_data
- })
-
-api.add_resource(WorkflowDailyRunsStatistic, '/apps/<uuid:app_id>/workflow/statistics/daily-conversations')
-api.add_resource(WorkflowDailyTerminalsStatistic, '/apps/<uuid:app_id>/workflow/statistics/daily-terminals')
-api.add_resource(WorkflowDailyTokenCostStatistic, '/apps/<uuid:app_id>/workflow/statistics/token-costs')
-api.add_resource(WorkflowAverageAppInteractionStatistic, '/apps/<uuid:app_id>/workflow/statistics/average-app-interactions')
+ response_data.append(
+ {"date": str(i.date), "interactions": float(i.interactions.quantize(Decimal("0.01")))}
+ )
+
+ return jsonify({"data": response_data})
+
+
+api.add_resource(WorkflowDailyRunsStatistic, "/apps/<uuid:app_id>/workflow/statistics/daily-conversations")
+api.add_resource(WorkflowDailyTerminalsStatistic, "/apps/<uuid:app_id>/workflow/statistics/daily-terminals")
+api.add_resource(WorkflowDailyTokenCostStatistic, "/apps/<uuid:app_id>/workflow/statistics/token-costs")
+api.add_resource(
+ WorkflowAverageAppInteractionStatistic, "/apps/<uuid:app_id>/workflow/statistics/average-app-interactions"
+)
diff --git a/api/controllers/console/app/wraps.py b/api/controllers/console/app/wraps.py
index d61ab6d6ae8f28..5e0a4bc814633a 100644
--- a/api/controllers/console/app/wraps.py
+++ b/api/controllers/console/app/wraps.py
@@ -8,24 +8,23 @@
from models.model import App, AppMode
-def get_app_model(view: Optional[Callable] = None, *,
- mode: Union[AppMode, list[AppMode]] = None):
+def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[AppMode]] = None):
def decorator(view_func):
@wraps(view_func)
def decorated_view(*args, **kwargs):
- if not kwargs.get('app_id'):
- raise ValueError('missing app_id in path parameters')
+ if not kwargs.get("app_id"):
+ raise ValueError("missing app_id in path parameters")
- app_id = kwargs.get('app_id')
+ app_id = kwargs.get("app_id")
app_id = str(app_id)
- del kwargs['app_id']
+ del kwargs["app_id"]
- app_model = db.session.query(App).filter(
- App.id == app_id,
- App.tenant_id == current_user.current_tenant_id,
- App.status == 'normal'
- ).first()
+ app_model = (
+ db.session.query(App)
+ .filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal")
+ .first()
+ )
if not app_model:
raise AppNotFoundError()
@@ -44,9 +43,10 @@ def decorated_view(*args, **kwargs):
mode_values = {m.value for m in modes}
raise AppNotFoundError(f"App mode is not in the supported list: {mode_values}")
- kwargs['app_model'] = app_model
+ kwargs["app_model"] = app_model
return view_func(*args, **kwargs)
+
return decorated_view
if view is None:
diff --git a/api/controllers/console/auth/activate.py b/api/controllers/console/auth/activate.py
index 8efb55cdb64edc..8ba6b53e7ead2b 100644
--- a/api/controllers/console/auth/activate.py
+++ b/api/controllers/console/auth/activate.py
@@ -17,60 +17,61 @@
class ActivateCheckApi(Resource):
def get(self):
parser = reqparse.RequestParser()
- parser.add_argument('workspace_id', type=str, required=False, nullable=True, location='args')
- parser.add_argument('email', type=email, required=False, nullable=True, location='args')
- parser.add_argument('token', type=str, required=True, nullable=False, location='args')
+ parser.add_argument("workspace_id", type=str, required=False, nullable=True, location="args")
+ parser.add_argument("email", type=email, required=False, nullable=True, location="args")
+ parser.add_argument("token", type=str, required=True, nullable=False, location="args")
args = parser.parse_args()
- workspaceId = args['workspace_id']
- reg_email = args['email']
- token = args['token']
+ workspaceId = args["workspace_id"]
+ reg_email = args["email"]
+ token = args["token"]
invitation = RegisterService.get_invitation_if_token_valid(workspaceId, reg_email, token)
- return {'is_valid': invitation is not None, 'workspace_name': invitation['tenant'].name if invitation else None}
+ return {"is_valid": invitation is not None, "workspace_name": invitation["tenant"].name if invitation else None}
class ActivateApi(Resource):
def post(self):
parser = reqparse.RequestParser()
- parser.add_argument('workspace_id', type=str, required=False, nullable=True, location='json')
- parser.add_argument('email', type=email, required=False, nullable=True, location='json')
- parser.add_argument('token', type=str, required=True, nullable=False, location='json')
- parser.add_argument('name', type=str_len(30), required=True, nullable=False, location='json')
- parser.add_argument('password', type=valid_password, required=True, nullable=False, location='json')
- parser.add_argument('interface_language', type=supported_language, required=True, nullable=False,
- location='json')
- parser.add_argument('timezone', type=timezone, required=True, nullable=False, location='json')
+ parser.add_argument("workspace_id", type=str, required=False, nullable=True, location="json")
+ parser.add_argument("email", type=email, required=False, nullable=True, location="json")
+ parser.add_argument("token", type=str, required=True, nullable=False, location="json")
+ parser.add_argument("name", type=str_len(30), required=True, nullable=False, location="json")
+ parser.add_argument("password", type=valid_password, required=True, nullable=False, location="json")
+ parser.add_argument(
+ "interface_language", type=supported_language, required=True, nullable=False, location="json"
+ )
+ parser.add_argument("timezone", type=timezone, required=True, nullable=False, location="json")
args = parser.parse_args()
- invitation = RegisterService.get_invitation_if_token_valid(args['workspace_id'], args['email'], args['token'])
+ invitation = RegisterService.get_invitation_if_token_valid(args["workspace_id"], args["email"], args["token"])
if invitation is None:
raise AlreadyActivateError()
- RegisterService.revoke_token(args['workspace_id'], args['email'], args['token'])
+ RegisterService.revoke_token(args["workspace_id"], args["email"], args["token"])
- account = invitation['account']
- account.name = args['name']
+ account = invitation["account"]
+ account.name = args["name"]
# generate password salt
salt = secrets.token_bytes(16)
base64_salt = base64.b64encode(salt).decode()
# encrypt password with salt
- password_hashed = hash_password(args['password'], salt)
+ password_hashed = hash_password(args["password"], salt)
base64_password_hashed = base64.b64encode(password_hashed).decode()
account.password = base64_password_hashed
account.password_salt = base64_salt
- account.interface_language = args['interface_language']
- account.timezone = args['timezone']
- account.interface_theme = 'light'
+ account.interface_language = args["interface_language"]
+ account.timezone = args["timezone"]
+ account.interface_theme = "light"
account.status = AccountStatus.ACTIVE.value
account.initialized_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
db.session.commit()
- return {'result': 'success'}
+ return {"result": "success"}
-api.add_resource(ActivateCheckApi, '/activate/check')
-api.add_resource(ActivateApi, '/activate')
+api.add_resource(ActivateCheckApi, "/activate/check")
+api.add_resource(ActivateApi, "/activate")
diff --git a/api/controllers/console/auth/data_source_bearer_auth.py b/api/controllers/console/auth/data_source_bearer_auth.py
index f79b93b74f6df3..50db6eebc13d3d 100644
--- a/api/controllers/console/auth/data_source_bearer_auth.py
+++ b/api/controllers/console/auth/data_source_bearer_auth.py
@@ -19,18 +19,19 @@ def get(self):
data_source_api_key_bindings = ApiKeyAuthService.get_provider_auth_list(current_user.current_tenant_id)
if data_source_api_key_bindings:
return {
- 'sources': [{
- 'id': data_source_api_key_binding.id,
- 'category': data_source_api_key_binding.category,
- 'provider': data_source_api_key_binding.provider,
- 'disabled': data_source_api_key_binding.disabled,
- 'created_at': int(data_source_api_key_binding.created_at.timestamp()),
- 'updated_at': int(data_source_api_key_binding.updated_at.timestamp()),
- }
- for data_source_api_key_binding in
- data_source_api_key_bindings]
+ "sources": [
+ {
+ "id": data_source_api_key_binding.id,
+ "category": data_source_api_key_binding.category,
+ "provider": data_source_api_key_binding.provider,
+ "disabled": data_source_api_key_binding.disabled,
+ "created_at": int(data_source_api_key_binding.created_at.timestamp()),
+ "updated_at": int(data_source_api_key_binding.updated_at.timestamp()),
+ }
+ for data_source_api_key_binding in data_source_api_key_bindings
+ ]
}
- return {'sources': []}
+ return {"sources": []}
class ApiKeyAuthDataSourceBinding(Resource):
@@ -42,16 +43,16 @@ def post(self):
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser()
- parser.add_argument('category', type=str, required=True, nullable=False, location='json')
- parser.add_argument('provider', type=str, required=True, nullable=False, location='json')
- parser.add_argument('credentials', type=dict, required=True, nullable=False, location='json')
+ parser.add_argument("category", type=str, required=True, nullable=False, location="json")
+ parser.add_argument("provider", type=str, required=True, nullable=False, location="json")
+ parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
args = parser.parse_args()
ApiKeyAuthService.validate_api_key_auth_args(args)
try:
ApiKeyAuthService.create_provider_auth(current_user.current_tenant_id, args)
except Exception as e:
raise ApiKeyAuthFailedError(str(e))
- return {'result': 'success'}, 200
+ return {"result": "success"}, 200
class ApiKeyAuthDataSourceBindingDelete(Resource):
@@ -65,9 +66,9 @@ def delete(self, binding_id):
ApiKeyAuthService.delete_provider_auth(current_user.current_tenant_id, binding_id)
- return {'result': 'success'}, 200
+ return {"result": "success"}, 200
-api.add_resource(ApiKeyAuthDataSource, '/api-key-auth/data-source')
-api.add_resource(ApiKeyAuthDataSourceBinding, '/api-key-auth/data-source/binding')
-api.add_resource(ApiKeyAuthDataSourceBindingDelete, '/api-key-auth/data-source/<uuid:binding_id>')
+api.add_resource(ApiKeyAuthDataSource, "/api-key-auth/data-source")
+api.add_resource(ApiKeyAuthDataSourceBinding, "/api-key-auth/data-source/binding")
+api.add_resource(ApiKeyAuthDataSourceBindingDelete, "/api-key-auth/data-source/<uuid:binding_id>")
diff --git a/api/controllers/console/auth/data_source_oauth.py b/api/controllers/console/auth/data_source_oauth.py
index 45cfa9d7ebcb1b..fd31e5ccc3b99c 100644
--- a/api/controllers/console/auth/data_source_oauth.py
+++ b/api/controllers/console/auth/data_source_oauth.py
@@ -17,13 +17,13 @@
def get_oauth_providers():
with current_app.app_context():
- notion_oauth = NotionOAuth(client_id=dify_config.NOTION_CLIENT_ID,
- client_secret=dify_config.NOTION_CLIENT_SECRET,
- redirect_uri=dify_config.CONSOLE_API_URL + '/console/api/oauth/data-source/callback/notion')
+ notion_oauth = NotionOAuth(
+ client_id=dify_config.NOTION_CLIENT_ID,
+ client_secret=dify_config.NOTION_CLIENT_SECRET,
+ redirect_uri=dify_config.CONSOLE_API_URL + "/console/api/oauth/data-source/callback/notion",
+ )
- OAUTH_PROVIDERS = {
- 'notion': notion_oauth
- }
+ OAUTH_PROVIDERS = {"notion": notion_oauth}
return OAUTH_PROVIDERS
@@ -37,18 +37,16 @@ def get(self, provider: str):
oauth_provider = OAUTH_DATASOURCE_PROVIDERS.get(provider)
print(vars(oauth_provider))
if not oauth_provider:
- return {'error': 'Invalid provider'}, 400
- if dify_config.NOTION_INTEGRATION_TYPE == 'internal':
+ return {"error": "Invalid provider"}, 400
+ if dify_config.NOTION_INTEGRATION_TYPE == "internal":
internal_secret = dify_config.NOTION_INTERNAL_SECRET
if not internal_secret:
- return {'error': 'Internal secret is not set'},
+ return ({"error": "Internal secret is not set"},)
oauth_provider.save_internal_access_token(internal_secret)
- return { 'data': '' }
+ return {"data": ""}
else:
auth_url = oauth_provider.get_authorization_url()
- return { 'data': auth_url }, 200
-
-
+ return {"data": auth_url}, 200
class OAuthDataSourceCallback(Resource):
@@ -57,18 +55,18 @@ def get(self, provider: str):
with current_app.app_context():
oauth_provider = OAUTH_DATASOURCE_PROVIDERS.get(provider)
if not oauth_provider:
- return {'error': 'Invalid provider'}, 400
- if 'code' in request.args:
- code = request.args.get('code')
+ return {"error": "Invalid provider"}, 400
+ if "code" in request.args:
+ code = request.args.get("code")
- return redirect(f'{dify_config.CONSOLE_WEB_URL}?type=notion&code={code}')
- elif 'error' in request.args:
- error = request.args.get('error')
+ return redirect(f"{dify_config.CONSOLE_WEB_URL}?type=notion&code={code}")
+ elif "error" in request.args:
+ error = request.args.get("error")
- return redirect(f'{dify_config.CONSOLE_WEB_URL}?type=notion&error={error}')
+ return redirect(f"{dify_config.CONSOLE_WEB_URL}?type=notion&error={error}")
else:
- return redirect(f'{dify_config.CONSOLE_WEB_URL}?type=notion&error=Access denied')
-
+ return redirect(f"{dify_config.CONSOLE_WEB_URL}?type=notion&error=Access denied")
+
class OAuthDataSourceBinding(Resource):
def get(self, provider: str):
@@ -76,17 +74,18 @@ def get(self, provider: str):
with current_app.app_context():
oauth_provider = OAUTH_DATASOURCE_PROVIDERS.get(provider)
if not oauth_provider:
- return {'error': 'Invalid provider'}, 400
- if 'code' in request.args:
- code = request.args.get('code')
+ return {"error": "Invalid provider"}, 400
+ if "code" in request.args:
+ code = request.args.get("code")
try:
oauth_provider.get_access_token(code)
except requests.exceptions.HTTPError as e:
logging.exception(
- f"An error occurred during the OAuthCallback process with {provider}: {e.response.text}")
- return {'error': 'OAuth data source process failed'}, 400
+ f"An error occurred during the OAuthCallback process with {provider}: {e.response.text}"
+ )
+ return {"error": "OAuth data source process failed"}, 400
- return {'result': 'success'}, 200
+ return {"result": "success"}, 200
class OAuthDataSourceSync(Resource):
@@ -100,18 +99,17 @@ def get(self, provider, binding_id):
with current_app.app_context():
oauth_provider = OAUTH_DATASOURCE_PROVIDERS.get(provider)
if not oauth_provider:
- return {'error': 'Invalid provider'}, 400
+ return {"error": "Invalid provider"}, 400
try:
oauth_provider.sync_data_source(binding_id)
except requests.exceptions.HTTPError as e:
- logging.exception(
- f"An error occurred during the OAuthCallback process with {provider}: {e.response.text}")
- return {'error': 'OAuth data source process failed'}, 400
+ logging.exception(f"An error occurred during the OAuthCallback process with {provider}: {e.response.text}")
+ return {"error": "OAuth data source process failed"}, 400
- return {'result': 'success'}, 200
+ return {"result": "success"}, 200
-api.add_resource(OAuthDataSource, '/oauth/data-source/<string:provider>')
-api.add_resource(OAuthDataSourceCallback, '/oauth/data-source/callback/<string:provider>')
-api.add_resource(OAuthDataSourceBinding, '/oauth/data-source/binding/<string:provider>')
-api.add_resource(OAuthDataSourceSync, '/oauth/data-source/<string:provider>/<uuid:binding_id>/sync')
+api.add_resource(OAuthDataSource, "/oauth/data-source/<string:provider>")
+api.add_resource(OAuthDataSourceCallback, "/oauth/data-source/callback/<string:provider>")
+api.add_resource(OAuthDataSourceBinding, "/oauth/data-source/binding/<string:provider>")
+api.add_resource(OAuthDataSourceSync, "/oauth/data-source/<string:provider>/<uuid:binding_id>/sync")
diff --git a/api/controllers/console/auth/error.py b/api/controllers/console/auth/error.py
index 53dab3298fbff3..ea23e097d0bf3e 100644
--- a/api/controllers/console/auth/error.py
+++ b/api/controllers/console/auth/error.py
@@ -2,31 +2,30 @@
class ApiKeyAuthFailedError(BaseHTTPException):
- error_code = 'auth_failed'
+ error_code = "auth_failed"
description = "{message}"
code = 500
class InvalidEmailError(BaseHTTPException):
- error_code = 'invalid_email'
+ error_code = "invalid_email"
description = "The email address is not valid."
code = 400
class PasswordMismatchError(BaseHTTPException):
- error_code = 'password_mismatch'
+ error_code = "password_mismatch"
description = "The passwords do not match."
code = 400
class InvalidTokenError(BaseHTTPException):
- error_code = 'invalid_or_expired_token'
+ error_code = "invalid_or_expired_token"
description = "The token is invalid or has expired."
code = 400
class PasswordResetRateLimitExceededError(BaseHTTPException):
- error_code = 'password_reset_rate_limit_exceeded'
+ error_code = "password_reset_rate_limit_exceeded"
description = "Password reset rate limit exceeded. Try again later."
code = 429
-
diff --git a/api/controllers/console/auth/forgot_password.py b/api/controllers/console/auth/forgot_password.py
index d78be770abd094..0b01a4906adbcf 100644
--- a/api/controllers/console/auth/forgot_password.py
+++ b/api/controllers/console/auth/forgot_password.py
@@ -21,14 +21,13 @@
class ForgotPasswordSendEmailApi(Resource):
-
@setup_required
def post(self):
parser = reqparse.RequestParser()
- parser.add_argument('email', type=str, required=True, location='json')
+ parser.add_argument("email", type=str, required=True, location="json")
args = parser.parse_args()
- email = args['email']
+ email = args["email"]
if not email_validate(email):
raise InvalidEmailError()
@@ -49,38 +48,36 @@ def post(self):
class ForgotPasswordCheckApi(Resource):
-
@setup_required
def post(self):
parser = reqparse.RequestParser()
- parser.add_argument('token', type=str, required=True, nullable=False, location='json')
+ parser.add_argument("token", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
- token = args['token']
+ token = args["token"]
reset_data = AccountService.get_reset_password_data(token)
if reset_data is None:
- return {'is_valid': False, 'email': None}
- return {'is_valid': True, 'email': reset_data.get('email')}
+ return {"is_valid": False, "email": None}
+ return {"is_valid": True, "email": reset_data.get("email")}
class ForgotPasswordResetApi(Resource):
-
@setup_required
def post(self):
parser = reqparse.RequestParser()
- parser.add_argument('token', type=str, required=True, nullable=False, location='json')
- parser.add_argument('new_password', type=valid_password, required=True, nullable=False, location='json')
- parser.add_argument('password_confirm', type=valid_password, required=True, nullable=False, location='json')
+ parser.add_argument("token", type=str, required=True, nullable=False, location="json")
+ parser.add_argument("new_password", type=valid_password, required=True, nullable=False, location="json")
+ parser.add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json")
args = parser.parse_args()
- new_password = args['new_password']
- password_confirm = args['password_confirm']
+ new_password = args["new_password"]
+ password_confirm = args["password_confirm"]
if str(new_password).strip() != str(password_confirm).strip():
raise PasswordMismatchError()
- token = args['token']
+ token = args["token"]
reset_data = AccountService.get_reset_password_data(token)
if reset_data is None:
@@ -94,14 +91,14 @@ def post(self):
password_hashed = hash_password(new_password, salt)
base64_password_hashed = base64.b64encode(password_hashed).decode()
- account = Account.query.filter_by(email=reset_data.get('email')).first()
+ account = Account.query.filter_by(email=reset_data.get("email")).first()
account.password = base64_password_hashed
account.password_salt = base64_salt
db.session.commit()
- return {'result': 'success'}
+ return {"result": "success"}
-api.add_resource(ForgotPasswordSendEmailApi, '/forgot-password')
-api.add_resource(ForgotPasswordCheckApi, '/forgot-password/validity')
-api.add_resource(ForgotPasswordResetApi, '/forgot-password/resets')
+api.add_resource(ForgotPasswordSendEmailApi, "/forgot-password")
+api.add_resource(ForgotPasswordCheckApi, "/forgot-password/validity")
+api.add_resource(ForgotPasswordResetApi, "/forgot-password/resets")
diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py
index c135ece67ef86c..62837af2b9b0eb 100644
--- a/api/controllers/console/auth/login.py
+++ b/api/controllers/console/auth/login.py
@@ -20,37 +20,39 @@ class LoginApi(Resource):
def post(self):
"""Authenticate user and login."""
parser = reqparse.RequestParser()
- parser.add_argument('email', type=email, required=True, location='json')
- parser.add_argument('password', type=valid_password, required=True, location='json')
- parser.add_argument('remember_me', type=bool, required=False, default=False, location='json')
+ parser.add_argument("email", type=email, required=True, location="json")
+ parser.add_argument("password", type=valid_password, required=True, location="json")
+ parser.add_argument("remember_me", type=bool, required=False, default=False, location="json")
args = parser.parse_args()
# todo: Verify the recaptcha
try:
- account = AccountService.authenticate(args['email'], args['password'])
+ account = AccountService.authenticate(args["email"], args["password"])
except services.errors.account.AccountLoginError as e:
- return {'code': 'unauthorized', 'message': str(e)}, 401
+ return {"code": "unauthorized", "message": str(e)}, 401
# SELF_HOSTED only have one workspace
tenants = TenantService.get_join_tenants(account)
if len(tenants) == 0:
- return {'result': 'fail', 'data': 'workspace not found, please contact system admin to invite you to join in a workspace'}
+ return {
+ "result": "fail",
+ "data": "workspace not found, please contact system admin to invite you to join in a workspace",
+ }
token = AccountService.login(account, ip_address=get_remote_ip(request))
- return {'result': 'success', 'data': token}
+ return {"result": "success", "data": token}
class LogoutApi(Resource):
-
@setup_required
def get(self):
account = cast(Account, flask_login.current_user)
- token = request.headers.get('Authorization', '').split(' ')[1]
+ token = request.headers.get("Authorization", "").split(" ")[1]
AccountService.logout(account=account, token=token)
flask_login.logout_user()
- return {'result': 'success'}
+ return {"result": "success"}
class ResetPasswordApi(Resource):
@@ -80,11 +82,11 @@ def get(self):
# 'subject': 'Reset your Dify password',
# 'html': """
# Dear User,
- # The Dify team has generated a new password for you, details as follows:
+ # The Dify team has generated a new password for you, details as follows:
# {new_password}
# Please change your password to log in as soon as possible.
# Regards,
- # The Dify Team
+ # The Dify Team
# """
# }
@@ -101,8 +103,8 @@ def get(self):
# # handle error
# pass
- return {'result': 'success'}
+ return {"result": "success"}
-api.add_resource(LoginApi, '/login')
-api.add_resource(LogoutApi, '/logout')
+api.add_resource(LoginApi, "/login")
+api.add_resource(LogoutApi, "/logout")
diff --git a/api/controllers/console/auth/oauth.py b/api/controllers/console/auth/oauth.py
index 4a651bfe7b009e..ae1b49f3ecf448 100644
--- a/api/controllers/console/auth/oauth.py
+++ b/api/controllers/console/auth/oauth.py
@@ -25,7 +25,7 @@ def get_oauth_providers():
github_oauth = GitHubOAuth(
client_id=dify_config.GITHUB_CLIENT_ID,
client_secret=dify_config.GITHUB_CLIENT_SECRET,
- redirect_uri=dify_config.CONSOLE_API_URL + '/console/api/oauth/authorize/github',
+ redirect_uri=dify_config.CONSOLE_API_URL + "/console/api/oauth/authorize/github",
)
if not dify_config.GOOGLE_CLIENT_ID or not dify_config.GOOGLE_CLIENT_SECRET:
google_oauth = None
@@ -33,10 +33,10 @@ def get_oauth_providers():
google_oauth = GoogleOAuth(
client_id=dify_config.GOOGLE_CLIENT_ID,
client_secret=dify_config.GOOGLE_CLIENT_SECRET,
- redirect_uri=dify_config.CONSOLE_API_URL + '/console/api/oauth/authorize/google',
+ redirect_uri=dify_config.CONSOLE_API_URL + "/console/api/oauth/authorize/google",
)
- OAUTH_PROVIDERS = {'github': github_oauth, 'google': google_oauth}
+ OAUTH_PROVIDERS = {"github": github_oauth, "google": google_oauth}
return OAUTH_PROVIDERS
@@ -47,7 +47,7 @@ def get(self, provider: str):
oauth_provider = OAUTH_PROVIDERS.get(provider)
print(vars(oauth_provider))
if not oauth_provider:
- return {'error': 'Invalid provider'}, 400
+ return {"error": "Invalid provider"}, 400
auth_url = oauth_provider.get_authorization_url()
return redirect(auth_url)
@@ -59,20 +59,20 @@ def get(self, provider: str):
with current_app.app_context():
oauth_provider = OAUTH_PROVIDERS.get(provider)
if not oauth_provider:
- return {'error': 'Invalid provider'}, 400
+ return {"error": "Invalid provider"}, 400
- code = request.args.get('code')
+ code = request.args.get("code")
try:
token = oauth_provider.get_access_token(code)
user_info = oauth_provider.get_user_info(token)
except requests.exceptions.HTTPError as e:
- logging.exception(f'An error occurred during the OAuth process with {provider}: {e.response.text}')
- return {'error': 'OAuth process failed'}, 400
+ logging.exception(f"An error occurred during the OAuth process with {provider}: {e.response.text}")
+ return {"error": "OAuth process failed"}, 400
account = _generate_account(provider, user_info)
# Check account status
if account.status == AccountStatus.BANNED.value or account.status == AccountStatus.CLOSED.value:
- return {'error': 'Account is banned or closed.'}, 403
+ return {"error": "Account is banned or closed."}, 403
if account.status == AccountStatus.PENDING.value:
account.status = AccountStatus.ACTIVE.value
@@ -83,7 +83,7 @@ def get(self, provider: str):
token = AccountService.login(account, ip_address=get_remote_ip(request))
- return redirect(f'{dify_config.CONSOLE_WEB_URL}?console_token={token}')
+ return redirect(f"{dify_config.CONSOLE_WEB_URL}?console_token={token}")
def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Optional[Account]:
@@ -101,7 +101,7 @@ def _generate_account(provider: str, user_info: OAuthUserInfo):
if not account:
# Create account
- account_name = user_info.name if user_info.name else 'Dify'
+ account_name = user_info.name if user_info.name else "Dify"
account = RegisterService.register(
email=user_info.email, name=account_name, password=None, open_id=user_info.id, provider=provider
)
@@ -121,5 +121,5 @@ def _generate_account(provider: str, user_info: OAuthUserInfo):
return account
-api.add_resource(OAuthLogin, '/oauth/login/<provider>')
-api.add_resource(OAuthCallback, '/oauth/authorize/<provider>')
+api.add_resource(OAuthLogin, "/oauth/login/<provider>")
+api.add_resource(OAuthCallback, "/oauth/authorize/<provider>")
diff --git a/api/controllers/console/billing/billing.py b/api/controllers/console/billing/billing.py
index 72a6129efa3e4d..9a1d9148696349 100644
--- a/api/controllers/console/billing/billing.py
+++ b/api/controllers/console/billing/billing.py
@@ -9,28 +9,24 @@
class Subscription(Resource):
-
@setup_required
@login_required
@account_initialization_required
@only_edition_cloud
def get(self):
-
parser = reqparse.RequestParser()
- parser.add_argument('plan', type=str, required=True, location='args', choices=['professional', 'team'])
- parser.add_argument('interval', type=str, required=True, location='args', choices=['month', 'year'])
+ parser.add_argument("plan", type=str, required=True, location="args", choices=["professional", "team"])
+ parser.add_argument("interval", type=str, required=True, location="args", choices=["month", "year"])
args = parser.parse_args()
BillingService.is_tenant_owner_or_admin(current_user)
- return BillingService.get_subscription(args['plan'],
- args['interval'],
- current_user.email,
- current_user.current_tenant_id)
+ return BillingService.get_subscription(
+ args["plan"], args["interval"], current_user.email, current_user.current_tenant_id
+ )
class Invoices(Resource):
-
@setup_required
@login_required
@account_initialization_required
@@ -40,5 +36,5 @@ def get(self):
return BillingService.get_invoices(current_user.email, current_user.current_tenant_id)
-api.add_resource(Subscription, '/billing/subscription')
-api.add_resource(Invoices, '/billing/invoices')
+api.add_resource(Subscription, "/billing/subscription")
+api.add_resource(Invoices, "/billing/invoices")
diff --git a/api/controllers/console/datasets/data_source.py b/api/controllers/console/datasets/data_source.py
index 0ca0f0a85653dc..0e1acab946ae5f 100644
--- a/api/controllers/console/datasets/data_source.py
+++ b/api/controllers/console/datasets/data_source.py
@@ -22,19 +22,22 @@
class DataSourceApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
@marshal_with(integrate_list_fields)
def get(self):
# get workspace data source integrates
- data_source_integrates = db.session.query(DataSourceOauthBinding).filter(
- DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
- DataSourceOauthBinding.disabled == False
- ).all()
+ data_source_integrates = (
+ db.session.query(DataSourceOauthBinding)
+ .filter(
+ DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
+ DataSourceOauthBinding.disabled == False,
+ )
+ .all()
+ )
- base_url = request.url_root.rstrip('/')
+ base_url = request.url_root.rstrip("/")
data_source_oauth_base_path = "/console/api/oauth/data-source"
providers = ["notion"]
@@ -44,26 +47,30 @@ def get(self):
existing_integrates = filter(lambda item: item.provider == provider, data_source_integrates)
if existing_integrates:
for existing_integrate in list(existing_integrates):
- integrate_data.append({
- 'id': existing_integrate.id,
- 'provider': provider,
- 'created_at': existing_integrate.created_at,
- 'is_bound': True,
- 'disabled': existing_integrate.disabled,
- 'source_info': existing_integrate.source_info,
- 'link': f'{base_url}{data_source_oauth_base_path}/{provider}'
- })
+ integrate_data.append(
+ {
+ "id": existing_integrate.id,
+ "provider": provider,
+ "created_at": existing_integrate.created_at,
+ "is_bound": True,
+ "disabled": existing_integrate.disabled,
+ "source_info": existing_integrate.source_info,
+ "link": f"{base_url}{data_source_oauth_base_path}/{provider}",
+ }
+ )
else:
- integrate_data.append({
- 'id': None,
- 'provider': provider,
- 'created_at': None,
- 'source_info': None,
- 'is_bound': False,
- 'disabled': None,
- 'link': f'{base_url}{data_source_oauth_base_path}/{provider}'
- })
- return {'data': integrate_data}, 200
+ integrate_data.append(
+ {
+ "id": None,
+ "provider": provider,
+ "created_at": None,
+ "source_info": None,
+ "is_bound": False,
+ "disabled": None,
+ "link": f"{base_url}{data_source_oauth_base_path}/{provider}",
+ }
+ )
+ return {"data": integrate_data}, 200
@setup_required
@login_required
@@ -71,92 +78,82 @@ def get(self):
def patch(self, binding_id, action):
binding_id = str(binding_id)
action = str(action)
- data_source_binding = DataSourceOauthBinding.query.filter_by(
- id=binding_id
- ).first()
+ data_source_binding = DataSourceOauthBinding.query.filter_by(id=binding_id).first()
if data_source_binding is None:
- raise NotFound('Data source binding not found.')
+ raise NotFound("Data source binding not found.")
# enable binding
- if action == 'enable':
+ if action == "enable":
if data_source_binding.disabled:
data_source_binding.disabled = False
data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
db.session.add(data_source_binding)
db.session.commit()
else:
- raise ValueError('Data source is not disabled.')
+ raise ValueError("Data source is not disabled.")
# disable binding
- if action == 'disable':
+ if action == "disable":
if not data_source_binding.disabled:
data_source_binding.disabled = True
data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
db.session.add(data_source_binding)
db.session.commit()
else:
- raise ValueError('Data source is disabled.')
- return {'result': 'success'}, 200
+ raise ValueError("Data source is disabled.")
+ return {"result": "success"}, 200
class DataSourceNotionListApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
@marshal_with(integrate_notion_info_list_fields)
def get(self):
- dataset_id = request.args.get('dataset_id', default=None, type=str)
+ dataset_id = request.args.get("dataset_id", default=None, type=str)
exist_page_ids = []
# import notion in the exist dataset
if dataset_id:
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
- raise NotFound('Dataset not found.')
- if dataset.data_source_type != 'notion_import':
- raise ValueError('Dataset is not notion type.')
+ raise NotFound("Dataset not found.")
+ if dataset.data_source_type != "notion_import":
+ raise ValueError("Dataset is not notion type.")
documents = Document.query.filter_by(
dataset_id=dataset_id,
tenant_id=current_user.current_tenant_id,
- data_source_type='notion_import',
- enabled=True
+ data_source_type="notion_import",
+ enabled=True,
).all()
if documents:
for document in documents:
data_source_info = json.loads(document.data_source_info)
- exist_page_ids.append(data_source_info['notion_page_id'])
+ exist_page_ids.append(data_source_info["notion_page_id"])
# get all authorized pages
data_source_bindings = DataSourceOauthBinding.query.filter_by(
- tenant_id=current_user.current_tenant_id,
- provider='notion',
- disabled=False
+ tenant_id=current_user.current_tenant_id, provider="notion", disabled=False
).all()
if not data_source_bindings:
- return {
- 'notion_info': []
- }, 200
+ return {"notion_info": []}, 200
pre_import_info_list = []
for data_source_binding in data_source_bindings:
source_info = data_source_binding.source_info
- pages = source_info['pages']
+ pages = source_info["pages"]
# Filter out already bound pages
for page in pages:
- if page['page_id'] in exist_page_ids:
- page['is_bound'] = True
+ if page["page_id"] in exist_page_ids:
+ page["is_bound"] = True
else:
- page['is_bound'] = False
+ page["is_bound"] = False
pre_import_info = {
- 'workspace_name': source_info['workspace_name'],
- 'workspace_icon': source_info['workspace_icon'],
- 'workspace_id': source_info['workspace_id'],
- 'pages': pages,
+ "workspace_name": source_info["workspace_name"],
+ "workspace_icon": source_info["workspace_icon"],
+ "workspace_id": source_info["workspace_id"],
+ "pages": pages,
}
pre_import_info_list.append(pre_import_info)
- return {
- 'notion_info': pre_import_info_list
- }, 200
+ return {"notion_info": pre_import_info_list}, 200
class DataSourceNotionApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
@@ -166,64 +163,67 @@ def get(self, workspace_id, page_id, page_type):
data_source_binding = DataSourceOauthBinding.query.filter(
db.and_(
DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
- DataSourceOauthBinding.provider == 'notion',
+ DataSourceOauthBinding.provider == "notion",
DataSourceOauthBinding.disabled == False,
- DataSourceOauthBinding.source_info['workspace_id'] == f'"{workspace_id}"'
+ DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"',
)
).first()
if not data_source_binding:
- raise NotFound('Data source binding not found.')
+ raise NotFound("Data source binding not found.")
extractor = NotionExtractor(
notion_workspace_id=workspace_id,
notion_obj_id=page_id,
notion_page_type=page_type,
notion_access_token=data_source_binding.access_token,
- tenant_id=current_user.current_tenant_id
+ tenant_id=current_user.current_tenant_id,
)
text_docs = extractor.extract()
- return {
- 'content': "\n".join([doc.page_content for doc in text_docs])
- }, 200
+ return {"content": "\n".join([doc.page_content for doc in text_docs])}, 200
@setup_required
@login_required
@account_initialization_required
def post(self):
parser = reqparse.RequestParser()
- parser.add_argument('notion_info_list', type=list, required=True, nullable=True, location='json')
- parser.add_argument('process_rule', type=dict, required=True, nullable=True, location='json')
- parser.add_argument('doc_form', type=str, default='text_model', required=False, nullable=False, location='json')
- parser.add_argument('doc_language', type=str, default='English', required=False, nullable=False, location='json')
+ parser.add_argument("notion_info_list", type=list, required=True, nullable=True, location="json")
+ parser.add_argument("process_rule", type=dict, required=True, nullable=True, location="json")
+ parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
+ parser.add_argument(
+ "doc_language", type=str, default="English", required=False, nullable=False, location="json"
+ )
args = parser.parse_args()
# validate args
DocumentService.estimate_args_validate(args)
- notion_info_list = args['notion_info_list']
+ notion_info_list = args["notion_info_list"]
extract_settings = []
for notion_info in notion_info_list:
- workspace_id = notion_info['workspace_id']
- for page in notion_info['pages']:
+ workspace_id = notion_info["workspace_id"]
+ for page in notion_info["pages"]:
extract_setting = ExtractSetting(
datasource_type="notion_import",
notion_info={
"notion_workspace_id": workspace_id,
- "notion_obj_id": page['page_id'],
- "notion_page_type": page['type'],
- "tenant_id": current_user.current_tenant_id
+ "notion_obj_id": page["page_id"],
+ "notion_page_type": page["type"],
+ "tenant_id": current_user.current_tenant_id,
},
- document_model=args['doc_form']
+ document_model=args["doc_form"],
)
extract_settings.append(extract_setting)
indexing_runner = IndexingRunner()
- response = indexing_runner.indexing_estimate(current_user.current_tenant_id, extract_settings,
- args['process_rule'], args['doc_form'],
- args['doc_language'])
+ response = indexing_runner.indexing_estimate(
+ current_user.current_tenant_id,
+ extract_settings,
+ args["process_rule"],
+ args["doc_form"],
+ args["doc_language"],
+ )
return response, 200
class DataSourceNotionDatasetSyncApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
@@ -240,7 +240,6 @@ def get(self, dataset_id):
class DataSourceNotionDocumentSyncApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
@@ -258,10 +257,14 @@ def get(self, dataset_id, document_id):
return 200
-api.add_resource(DataSourceApi, '/data-source/integrates', '/data-source/integrates/<uuid:binding_id>/<string:action>')
-api.add_resource(DataSourceNotionListApi, '/notion/pre-import/pages')
-api.add_resource(DataSourceNotionApi,
- '/notion/workspaces/<uuid:workspace_id>/pages/<uuid:page_id>/<string:page_type>/preview',
- '/datasets/notion-indexing-estimate')
-api.add_resource(DataSourceNotionDatasetSyncApi, '/datasets/<uuid:dataset_id>/notion/sync')
-api.add_resource(DataSourceNotionDocumentSyncApi, '/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/notion/sync')
+api.add_resource(DataSourceApi, "/data-source/integrates", "/data-source/integrates/<uuid:binding_id>/<string:action>")
+api.add_resource(DataSourceNotionListApi, "/notion/pre-import/pages")
+api.add_resource(
+ DataSourceNotionApi,
+ "/notion/workspaces/<uuid:workspace_id>/pages/<uuid:page_id>/<string:page_type>/preview",
+ "/datasets/notion-indexing-estimate",
+)
+api.add_resource(DataSourceNotionDatasetSyncApi, "/datasets/<uuid:dataset_id>/notion/sync")
+api.add_resource(
+ DataSourceNotionDocumentSyncApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/notion/sync"
+)
diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py
index be0281f07af27a..44c1390c14fa62 100644
--- a/api/controllers/console/datasets/datasets.py
+++ b/api/controllers/console/datasets/datasets.py
@@ -24,52 +24,47 @@
from fields.dataset_fields import dataset_detail_fields, dataset_query_detail_fields
from fields.document_fields import document_status_fields
from libs.login import login_required
-from models.dataset import Dataset, Document, DocumentSegment
+from models.dataset import Dataset, DatasetPermissionEnum, Document, DocumentSegment
from models.model import ApiToken, UploadFile
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService
def _validate_name(name):
if not name or len(name) < 1 or len(name) > 40:
- raise ValueError('Name must be between 1 to 40 characters.')
+ raise ValueError("Name must be between 1 to 40 characters.")
return name
def _validate_description_length(description):
if len(description) > 400:
- raise ValueError('Description cannot exceed 400 characters.')
+ raise ValueError("Description cannot exceed 400 characters.")
return description
class DatasetListApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
def get(self):
- page = request.args.get('page', default=1, type=int)
- limit = request.args.get('limit', default=20, type=int)
- ids = request.args.getlist('ids')
- provider = request.args.get('provider', default="vendor")
- search = request.args.get('keyword', default=None, type=str)
- tag_ids = request.args.getlist('tag_ids')
+ page = request.args.get("page", default=1, type=int)
+ limit = request.args.get("limit", default=20, type=int)
+ ids = request.args.getlist("ids")
+ provider = request.args.get("provider", default="vendor")
+ search = request.args.get("keyword", default=None, type=str)
+ tag_ids = request.args.getlist("tag_ids")
if ids:
datasets, total = DatasetService.get_datasets_by_ids(ids, current_user.current_tenant_id)
else:
- datasets, total = DatasetService.get_datasets(page, limit, provider,
- current_user.current_tenant_id, current_user, search, tag_ids)
+ datasets, total = DatasetService.get_datasets(
+ page, limit, provider, current_user.current_tenant_id, current_user, search, tag_ids
+ )
# check embedding setting
provider_manager = ProviderManager()
- configurations = provider_manager.get_configurations(
- tenant_id=current_user.current_tenant_id
- )
+ configurations = provider_manager.get_configurations(tenant_id=current_user.current_tenant_id)
- embedding_models = configurations.get_models(
- model_type=ModelType.TEXT_EMBEDDING,
- only_active=True
- )
+ embedding_models = configurations.get_models(model_type=ModelType.TEXT_EMBEDDING, only_active=True)
model_names = []
for embedding_model in embedding_models:
@@ -77,28 +72,22 @@ def get(self):
data = marshal(datasets, dataset_detail_fields)
for item in data:
- if item['indexing_technique'] == 'high_quality':
+ if item["indexing_technique"] == "high_quality":
item_model = f"{item['embedding_model']}:{item['embedding_model_provider']}"
if item_model in model_names:
- item['embedding_available'] = True
+ item["embedding_available"] = True
else:
- item['embedding_available'] = False
+ item["embedding_available"] = False
else:
- item['embedding_available'] = True
+ item["embedding_available"] = True
- if item.get('permission') == 'partial_members':
- part_users_list = DatasetPermissionService.get_dataset_partial_member_list(item['id'])
- item.update({'partial_member_list': part_users_list})
+ if item.get("permission") == "partial_members":
+ part_users_list = DatasetPermissionService.get_dataset_partial_member_list(item["id"])
+ item.update({"partial_member_list": part_users_list})
else:
- item.update({'partial_member_list': []})
+ item.update({"partial_member_list": []})
- response = {
- 'data': data,
- 'has_more': len(datasets) == limit,
- 'limit': limit,
- 'total': total,
- 'page': page
- }
+ response = {"data": data, "has_more": len(datasets) == limit, "limit": limit, "total": total, "page": page}
return response, 200
@setup_required
@@ -106,13 +95,21 @@ def get(self):
@account_initialization_required
def post(self):
parser = reqparse.RequestParser()
- parser.add_argument('name', nullable=False, required=True,
- help='type is required. Name must be between 1 to 40 characters.',
- type=_validate_name)
- parser.add_argument('indexing_technique', type=str, location='json',
- choices=Dataset.INDEXING_TECHNIQUE_LIST,
- nullable=True,
- help='Invalid indexing technique.')
+ parser.add_argument(
+ "name",
+ nullable=False,
+ required=True,
+ help="type is required. Name must be between 1 to 40 characters.",
+ type=_validate_name,
+ )
+ parser.add_argument(
+ "indexing_technique",
+ type=str,
+ location="json",
+ choices=Dataset.INDEXING_TECHNIQUE_LIST,
+ nullable=True,
+ help="Invalid indexing technique.",
+ )
args = parser.parse_args()
# The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
@@ -122,9 +119,10 @@ def post(self):
try:
dataset = DatasetService.create_empty_dataset(
tenant_id=current_user.current_tenant_id,
- name=args['name'],
- indexing_technique=args['indexing_technique'],
- account=current_user
+ name=args["name"],
+ indexing_technique=args["indexing_technique"],
+ account=current_user,
+ permission=DatasetPermissionEnum.ONLY_ME,
)
except services.errors.dataset.DatasetNameDuplicateError:
raise DatasetNameDuplicateError()
@@ -142,42 +140,36 @@ def get(self, dataset_id):
if dataset is None:
raise NotFound("Dataset not found.")
try:
- DatasetService.check_dataset_permission(
- dataset, current_user)
+ DatasetService.check_dataset_permission(dataset, current_user)
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
data = marshal(dataset, dataset_detail_fields)
- if data.get('permission') == 'partial_members':
+ if data.get("permission") == "partial_members":
part_users_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str)
- data.update({'partial_member_list': part_users_list})
+ data.update({"partial_member_list": part_users_list})
# check embedding setting
provider_manager = ProviderManager()
- configurations = provider_manager.get_configurations(
- tenant_id=current_user.current_tenant_id
- )
+ configurations = provider_manager.get_configurations(tenant_id=current_user.current_tenant_id)
- embedding_models = configurations.get_models(
- model_type=ModelType.TEXT_EMBEDDING,
- only_active=True
- )
+ embedding_models = configurations.get_models(model_type=ModelType.TEXT_EMBEDDING, only_active=True)
model_names = []
for embedding_model in embedding_models:
model_names.append(f"{embedding_model.model}:{embedding_model.provider.provider}")
- if data['indexing_technique'] == 'high_quality':
+ if data["indexing_technique"] == "high_quality":
item_model = f"{data['embedding_model']}:{data['embedding_model_provider']}"
if item_model in model_names:
- data['embedding_available'] = True
+ data["embedding_available"] = True
else:
- data['embedding_available'] = False
+ data["embedding_available"] = False
else:
- data['embedding_available'] = True
+ data["embedding_available"] = True
- if data.get('permission') == 'partial_members':
+ if data.get("permission") == "partial_members":
part_users_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str)
- data.update({'partial_member_list': part_users_list})
+ data.update({"partial_member_list": part_users_list})
return data, 200
@@ -191,42 +183,49 @@ def patch(self, dataset_id):
raise NotFound("Dataset not found.")
parser = reqparse.RequestParser()
- parser.add_argument('name', nullable=False,
- help='type is required. Name must be between 1 to 40 characters.',
- type=_validate_name)
- parser.add_argument('description',
- location='json', store_missing=False,
- type=_validate_description_length)
- parser.add_argument('indexing_technique', type=str, location='json',
- choices=Dataset.INDEXING_TECHNIQUE_LIST,
- nullable=True,
- help='Invalid indexing technique.')
- parser.add_argument('permission', type=str, location='json', choices=(
- 'only_me', 'all_team_members', 'partial_members'), help='Invalid permission.'
- )
- parser.add_argument('embedding_model', type=str,
- location='json', help='Invalid embedding model.')
- parser.add_argument('embedding_model_provider', type=str,
- location='json', help='Invalid embedding model provider.')
- parser.add_argument('retrieval_model', type=dict, location='json', help='Invalid retrieval model.')
- parser.add_argument('partial_member_list', type=list, location='json', help='Invalid parent user list.')
+ parser.add_argument(
+ "name",
+ nullable=False,
+ help="type is required. Name must be between 1 to 40 characters.",
+ type=_validate_name,
+ )
+ parser.add_argument("description", location="json", store_missing=False, type=_validate_description_length)
+ parser.add_argument(
+ "indexing_technique",
+ type=str,
+ location="json",
+ choices=Dataset.INDEXING_TECHNIQUE_LIST,
+ nullable=True,
+ help="Invalid indexing technique.",
+ )
+ parser.add_argument(
+ "permission",
+ type=str,
+ location="json",
+ choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM),
+ help="Invalid permission.",
+ )
+ parser.add_argument("embedding_model", type=str, location="json", help="Invalid embedding model.")
+ parser.add_argument(
+ "embedding_model_provider", type=str, location="json", help="Invalid embedding model provider."
+ )
+ parser.add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.")
+ parser.add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.")
args = parser.parse_args()
data = request.get_json()
# check embedding model setting
- if data.get('indexing_technique') == 'high_quality':
- DatasetService.check_embedding_model_setting(dataset.tenant_id,
- data.get('embedding_model_provider'),
- data.get('embedding_model')
- )
+ if data.get("indexing_technique") == "high_quality":
+ DatasetService.check_embedding_model_setting(
+ dataset.tenant_id, data.get("embedding_model_provider"), data.get("embedding_model")
+ )
# The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
DatasetPermissionService.check_permission(
- current_user, dataset, data.get('permission'), data.get('partial_member_list')
+ current_user, dataset, data.get("permission"), data.get("partial_member_list")
)
- dataset = DatasetService.update_dataset(
- dataset_id_str, args, current_user)
+ dataset = DatasetService.update_dataset(dataset_id_str, args, current_user)
if dataset is None:
raise NotFound("Dataset not found.")
@@ -234,15 +233,19 @@ def patch(self, dataset_id):
result_data = marshal(dataset, dataset_detail_fields)
tenant_id = current_user.current_tenant_id
- if data.get('partial_member_list') and data.get('permission') == 'partial_members':
+ if data.get("partial_member_list") and data.get("permission") == "partial_members":
DatasetPermissionService.update_partial_member_list(
- tenant_id, dataset_id_str, data.get('partial_member_list')
+ tenant_id, dataset_id_str, data.get("partial_member_list")
)
- else:
+ # clear partial member list when permission is only_me or all_team_members
+ elif (
+ data.get("permission") == DatasetPermissionEnum.ONLY_ME
+ or data.get("permission") == DatasetPermissionEnum.ALL_TEAM
+ ):
DatasetPermissionService.clear_partial_member_list(dataset_id_str)
partial_member_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str)
- result_data.update({'partial_member_list': partial_member_list})
+ result_data.update({"partial_member_list": partial_member_list})
return result_data, 200
@@ -259,12 +262,13 @@ def delete(self, dataset_id):
try:
if DatasetService.delete_dataset(dataset_id_str, current_user):
DatasetPermissionService.clear_partial_member_list(dataset_id_str)
- return {'result': 'success'}, 204
+ return {"result": "success"}, 204
else:
raise NotFound("Dataset not found.")
except services.errors.dataset.DatasetInUseError:
raise DatasetInUseError()
+
class DatasetUseCheckApi(Resource):
@setup_required
@login_required
@@ -273,10 +277,10 @@ def get(self, dataset_id):
dataset_id_str = str(dataset_id)
dataset_is_using = DatasetService.dataset_use_check(dataset_id_str)
- return {'is_using': dataset_is_using}, 200
+ return {"is_using": dataset_is_using}, 200
-class DatasetQueryApi(Resource):
+class DatasetQueryApi(Resource):
@setup_required
@login_required
@account_initialization_required
@@ -291,51 +295,53 @@ def get(self, dataset_id):
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
- page = request.args.get('page', default=1, type=int)
- limit = request.args.get('limit', default=20, type=int)
+ page = request.args.get("page", default=1, type=int)
+ limit = request.args.get("limit", default=20, type=int)
- dataset_queries, total = DatasetService.get_dataset_queries(
- dataset_id=dataset.id,
- page=page,
- per_page=limit
- )
+ dataset_queries, total = DatasetService.get_dataset_queries(dataset_id=dataset.id, page=page, per_page=limit)
response = {
- 'data': marshal(dataset_queries, dataset_query_detail_fields),
- 'has_more': len(dataset_queries) == limit,
- 'limit': limit,
- 'total': total,
- 'page': page
+ "data": marshal(dataset_queries, dataset_query_detail_fields),
+ "has_more": len(dataset_queries) == limit,
+ "limit": limit,
+ "total": total,
+ "page": page,
}
return response, 200
class DatasetIndexingEstimateApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
def post(self):
parser = reqparse.RequestParser()
- parser.add_argument('info_list', type=dict, required=True, nullable=True, location='json')
- parser.add_argument('process_rule', type=dict, required=True, nullable=True, location='json')
- parser.add_argument('indexing_technique', type=str, required=True,
- choices=Dataset.INDEXING_TECHNIQUE_LIST,
- nullable=True, location='json')
- parser.add_argument('doc_form', type=str, default='text_model', required=False, nullable=False, location='json')
- parser.add_argument('dataset_id', type=str, required=False, nullable=False, location='json')
- parser.add_argument('doc_language', type=str, default='English', required=False, nullable=False,
- location='json')
+ parser.add_argument("info_list", type=dict, required=True, nullable=True, location="json")
+ parser.add_argument("process_rule", type=dict, required=True, nullable=True, location="json")
+ parser.add_argument(
+ "indexing_technique",
+ type=str,
+ required=True,
+ choices=Dataset.INDEXING_TECHNIQUE_LIST,
+ nullable=True,
+ location="json",
+ )
+ parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
+ parser.add_argument("dataset_id", type=str, required=False, nullable=False, location="json")
+ parser.add_argument(
+ "doc_language", type=str, default="English", required=False, nullable=False, location="json"
+ )
args = parser.parse_args()
# validate args
DocumentService.estimate_args_validate(args)
extract_settings = []
- if args['info_list']['data_source_type'] == 'upload_file':
- file_ids = args['info_list']['file_info_list']['file_ids']
- file_details = db.session.query(UploadFile).filter(
- UploadFile.tenant_id == current_user.current_tenant_id,
- UploadFile.id.in_(file_ids)
- ).all()
+ if args["info_list"]["data_source_type"] == "upload_file":
+ file_ids = args["info_list"]["file_info_list"]["file_ids"]
+ file_details = (
+ db.session.query(UploadFile)
+ .filter(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id.in_(file_ids))
+ .all()
+ )
if file_details is None:
raise NotFound("File not found.")
@@ -343,55 +349,58 @@ def post(self):
if file_details:
for file_detail in file_details:
extract_setting = ExtractSetting(
- datasource_type="upload_file",
- upload_file=file_detail,
- document_model=args['doc_form']
+ datasource_type="upload_file", upload_file=file_detail, document_model=args["doc_form"]
)
extract_settings.append(extract_setting)
- elif args['info_list']['data_source_type'] == 'notion_import':
- notion_info_list = args['info_list']['notion_info_list']
+ elif args["info_list"]["data_source_type"] == "notion_import":
+ notion_info_list = args["info_list"]["notion_info_list"]
for notion_info in notion_info_list:
- workspace_id = notion_info['workspace_id']
- for page in notion_info['pages']:
+ workspace_id = notion_info["workspace_id"]
+ for page in notion_info["pages"]:
extract_setting = ExtractSetting(
datasource_type="notion_import",
notion_info={
"notion_workspace_id": workspace_id,
- "notion_obj_id": page['page_id'],
- "notion_page_type": page['type'],
- "tenant_id": current_user.current_tenant_id
+ "notion_obj_id": page["page_id"],
+ "notion_page_type": page["type"],
+ "tenant_id": current_user.current_tenant_id,
},
- document_model=args['doc_form']
+ document_model=args["doc_form"],
)
extract_settings.append(extract_setting)
- elif args['info_list']['data_source_type'] == 'website_crawl':
- website_info_list = args['info_list']['website_info_list']
- for url in website_info_list['urls']:
+ elif args["info_list"]["data_source_type"] == "website_crawl":
+ website_info_list = args["info_list"]["website_info_list"]
+ for url in website_info_list["urls"]:
extract_setting = ExtractSetting(
datasource_type="website_crawl",
website_info={
- "provider": website_info_list['provider'],
- "job_id": website_info_list['job_id'],
+ "provider": website_info_list["provider"],
+ "job_id": website_info_list["job_id"],
"url": url,
"tenant_id": current_user.current_tenant_id,
- "mode": 'crawl',
- "only_main_content": website_info_list['only_main_content']
+ "mode": "crawl",
+ "only_main_content": website_info_list["only_main_content"],
},
- document_model=args['doc_form']
+ document_model=args["doc_form"],
)
extract_settings.append(extract_setting)
else:
- raise ValueError('Data source type not support')
+ raise ValueError("Data source type not support")
indexing_runner = IndexingRunner()
try:
- response = indexing_runner.indexing_estimate(current_user.current_tenant_id, extract_settings,
- args['process_rule'], args['doc_form'],
- args['doc_language'], args['dataset_id'],
- args['indexing_technique'])
+ response = indexing_runner.indexing_estimate(
+ current_user.current_tenant_id,
+ extract_settings,
+ args["process_rule"],
+ args["doc_form"],
+ args["doc_language"],
+ args["dataset_id"],
+ args["indexing_technique"],
+ )
except LLMBadRequestError:
raise ProviderNotInitializeError(
- "No Embedding Model available. Please configure a valid provider "
- "in the Settings -> Model Provider.")
+ "No Embedding Model available. Please configure a valid provider " "in the Settings -> Model Provider."
+ )
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
except Exception as e:
@@ -401,7 +410,6 @@ def post(self):
class DatasetRelatedAppListApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
@@ -425,52 +433,52 @@ def get(self, dataset_id):
if app_model:
related_apps.append(app_model)
- return {
- 'data': related_apps,
- 'total': len(related_apps)
- }, 200
+ return {"data": related_apps, "total": len(related_apps)}, 200
class DatasetIndexingStatusApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
def get(self, dataset_id):
dataset_id = str(dataset_id)
- documents = db.session.query(Document).filter(
- Document.dataset_id == dataset_id,
- Document.tenant_id == current_user.current_tenant_id
- ).all()
+ documents = (
+ db.session.query(Document)
+ .filter(Document.dataset_id == dataset_id, Document.tenant_id == current_user.current_tenant_id)
+ .all()
+ )
documents_status = []
for document in documents:
- completed_segments = DocumentSegment.query.filter(DocumentSegment.completed_at.isnot(None),
- DocumentSegment.document_id == str(document.id),
- DocumentSegment.status != 're_segment').count()
- total_segments = DocumentSegment.query.filter(DocumentSegment.document_id == str(document.id),
- DocumentSegment.status != 're_segment').count()
+ completed_segments = DocumentSegment.query.filter(
+ DocumentSegment.completed_at.isnot(None),
+ DocumentSegment.document_id == str(document.id),
+ DocumentSegment.status != "re_segment",
+ ).count()
+ total_segments = DocumentSegment.query.filter(
+ DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment"
+ ).count()
document.completed_segments = completed_segments
document.total_segments = total_segments
documents_status.append(marshal(document, document_status_fields))
- data = {
- 'data': documents_status
- }
+ data = {"data": documents_status}
return data
class DatasetApiKeyApi(Resource):
max_keys = 10
- token_prefix = 'dataset-'
- resource_type = 'dataset'
+ token_prefix = "dataset-"
+ resource_type = "dataset"
@setup_required
@login_required
@account_initialization_required
@marshal_with(api_key_list)
def get(self):
- keys = db.session.query(ApiToken). \
- filter(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id). \
- all()
+ keys = (
+ db.session.query(ApiToken)
+ .filter(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id)
+ .all()
+ )
return {"items": keys}
@setup_required
@@ -482,15 +490,17 @@ def post(self):
if not current_user.is_admin_or_owner:
raise Forbidden()
- current_key_count = db.session.query(ApiToken). \
- filter(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id). \
- count()
+ current_key_count = (
+ db.session.query(ApiToken)
+ .filter(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id)
+ .count()
+ )
if current_key_count >= self.max_keys:
flask_restful.abort(
400,
message=f"Cannot create more than {self.max_keys} API keys for this resource type.",
- code='max_keys_exceeded'
+ code="max_keys_exceeded",
)
key = ApiToken.generate_api_key(self.token_prefix, 24)
@@ -504,7 +514,7 @@ def post(self):
class DatasetApiDeleteApi(Resource):
- resource_type = 'dataset'
+ resource_type = "dataset"
@setup_required
@login_required
@@ -516,18 +526,23 @@ def delete(self, api_key_id):
if not current_user.is_admin_or_owner:
raise Forbidden()
- key = db.session.query(ApiToken). \
- filter(ApiToken.tenant_id == current_user.current_tenant_id, ApiToken.type == self.resource_type,
- ApiToken.id == api_key_id). \
- first()
+ key = (
+ db.session.query(ApiToken)
+ .filter(
+ ApiToken.tenant_id == current_user.current_tenant_id,
+ ApiToken.type == self.resource_type,
+ ApiToken.id == api_key_id,
+ )
+ .first()
+ )
if key is None:
- flask_restful.abort(404, message='API key not found')
+ flask_restful.abort(404, message="API key not found")
db.session.query(ApiToken).filter(ApiToken.id == api_key_id).delete()
db.session.commit()
- return {'result': 'success'}, 204
+ return {"result": "success"}, 204
class DatasetApiBaseUrlApi(Resource):
@@ -536,8 +551,10 @@ class DatasetApiBaseUrlApi(Resource):
@account_initialization_required
def get(self):
return {
- 'api_base_url': (dify_config.SERVICE_API_URL if dify_config.SERVICE_API_URL
- else request.host_url.rstrip('/')) + '/v1'
+ "api_base_url": (
+ dify_config.SERVICE_API_URL if dify_config.SERVICE_API_URL else request.host_url.rstrip("/")
+ )
+ + "/v1"
}
@@ -548,15 +565,26 @@ class DatasetRetrievalSettingApi(Resource):
def get(self):
vector_type = dify_config.VECTOR_STORE
match vector_type:
- case VectorType.MILVUS | VectorType.RELYT | VectorType.PGVECTOR | VectorType.TIDB_VECTOR | VectorType.CHROMA | VectorType.TENCENT:
- return {
- 'retrieval_method': [
- RetrievalMethod.SEMANTIC_SEARCH.value
- ]
- }
- case VectorType.QDRANT | VectorType.WEAVIATE | VectorType.OPENSEARCH | VectorType.ANALYTICDB | VectorType.MYSCALE | VectorType.ORACLE:
+ case (
+ VectorType.MILVUS
+ | VectorType.RELYT
+ | VectorType.PGVECTOR
+ | VectorType.TIDB_VECTOR
+ | VectorType.CHROMA
+ | VectorType.TENCENT
+ ):
+ return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]}
+ case (
+ VectorType.QDRANT
+ | VectorType.WEAVIATE
+ | VectorType.OPENSEARCH
+ | VectorType.ANALYTICDB
+ | VectorType.MYSCALE
+ | VectorType.ORACLE
+ | VectorType.ELASTICSEARCH
+ ):
return {
- 'retrieval_method': [
+ "retrieval_method": [
RetrievalMethod.SEMANTIC_SEARCH.value,
RetrievalMethod.FULL_TEXT_SEARCH.value,
RetrievalMethod.HYBRID_SEARCH.value,
@@ -572,15 +600,27 @@ class DatasetRetrievalSettingMockApi(Resource):
@account_initialization_required
def get(self, vector_type):
match vector_type:
- case VectorType.MILVUS | VectorType.RELYT | VectorType.PGVECTOR | VectorType.TIDB_VECTOR | VectorType.CHROMA | VectorType.TENCENT:
+ case (
+ VectorType.MILVUS
+ | VectorType.RELYT
+ | VectorType.TIDB_VECTOR
+ | VectorType.CHROMA
+ | VectorType.TENCENT
+ | VectorType.PGVECTO_RS
+ ):
+ return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]}
+ case (
+ VectorType.QDRANT
+ | VectorType.WEAVIATE
+ | VectorType.OPENSEARCH
+ | VectorType.ANALYTICDB
+ | VectorType.MYSCALE
+ | VectorType.ORACLE
+ | VectorType.ELASTICSEARCH
+ | VectorType.PGVECTOR
+ ):
return {
- 'retrieval_method': [
- RetrievalMethod.SEMANTIC_SEARCH.value
- ]
- }
- case VectorType.QDRANT | VectorType.WEAVIATE | VectorType.OPENSEARCH| VectorType.ANALYTICDB | VectorType.MYSCALE | VectorType.ORACLE:
- return {
- 'retrieval_method': [
+ "retrieval_method": [
RetrievalMethod.SEMANTIC_SEARCH.value,
RetrievalMethod.FULL_TEXT_SEARCH.value,
RetrievalMethod.HYBRID_SEARCH.value,
@@ -590,7 +630,6 @@ def get(self, vector_type):
raise ValueError(f"Unsupported vector db type {vector_type}.")
-
class DatasetErrorDocs(Resource):
@setup_required
@login_required
@@ -602,10 +641,7 @@ def get(self, dataset_id):
raise NotFound("Dataset not found.")
results = DocumentService.get_error_documents_by_dataset_id(dataset_id_str)
- return {
- 'data': [marshal(item, document_status_fields) for item in results],
- 'total': len(results)
- }, 200
+ return {"data": [marshal(item, document_status_fields) for item in results], "total": len(results)}, 200
class DatasetPermissionUserListApi(Resource):
@@ -625,21 +661,21 @@ def get(self, dataset_id):
partial_members_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str)
return {
- 'data': partial_members_list,
+ "data": partial_members_list,
}, 200
-api.add_resource(DatasetListApi, '/datasets')
-api.add_resource(DatasetApi, '/datasets/<uuid:dataset_id>')
-api.add_resource(DatasetUseCheckApi, '/datasets/<uuid:dataset_id>/use-check')
-api.add_resource(DatasetQueryApi, '/datasets/<uuid:dataset_id>/queries')
-api.add_resource(DatasetErrorDocs, '/datasets/<uuid:dataset_id>/error-docs')
-api.add_resource(DatasetIndexingEstimateApi, '/datasets/indexing-estimate')
-api.add_resource(DatasetRelatedAppListApi, '/datasets/<uuid:dataset_id>/related-apps')
-api.add_resource(DatasetIndexingStatusApi, '/datasets/<uuid:dataset_id>/indexing-status')
-api.add_resource(DatasetApiKeyApi, '/datasets/api-keys')
-api.add_resource(DatasetApiDeleteApi, '/datasets/api-keys/<uuid:api_key_id>')
-api.add_resource(DatasetApiBaseUrlApi, '/datasets/api-base-info')
-api.add_resource(DatasetRetrievalSettingApi, '/datasets/retrieval-setting')
-api.add_resource(DatasetRetrievalSettingMockApi, '/datasets/retrieval-setting/<string:vector_type>')
-api.add_resource(DatasetPermissionUserListApi, '/datasets/<uuid:dataset_id>/permission-part-users')
+api.add_resource(DatasetListApi, "/datasets")
+api.add_resource(DatasetApi, "/datasets/<uuid:dataset_id>")
+api.add_resource(DatasetUseCheckApi, "/datasets/<uuid:dataset_id>/use-check")
+api.add_resource(DatasetQueryApi, "/datasets/<uuid:dataset_id>/queries")
+api.add_resource(DatasetErrorDocs, "/datasets/<uuid:dataset_id>/error-docs")
+api.add_resource(DatasetIndexingEstimateApi, "/datasets/indexing-estimate")
+api.add_resource(DatasetRelatedAppListApi, "/datasets/<uuid:dataset_id>/related-apps")
+api.add_resource(DatasetIndexingStatusApi, "/datasets/<uuid:dataset_id>/indexing-status")
+api.add_resource(DatasetApiKeyApi, "/datasets/api-keys")
+api.add_resource(DatasetApiDeleteApi, "/datasets/api-keys/<uuid:api_key_id>")
+api.add_resource(DatasetApiBaseUrlApi, "/datasets/api-base-info")
+api.add_resource(DatasetRetrievalSettingApi, "/datasets/retrieval-setting")
+api.add_resource(DatasetRetrievalSettingMockApi, "/datasets/retrieval-setting/<string:vector_type>")
+api.add_resource(DatasetPermissionUserListApi, "/datasets/<uuid:dataset_id>/permission-part-users")
diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py
index afe0ca7c69b2b7..6bc29a86435fa4 100644
--- a/api/controllers/console/datasets/datasets_document.py
+++ b/api/controllers/console/datasets/datasets_document.py
@@ -57,7 +57,7 @@ class DocumentResource(Resource):
def get_document(self, dataset_id: str, document_id: str) -> Document:
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
- raise NotFound('Dataset not found.')
+ raise NotFound("Dataset not found.")
try:
DatasetService.check_dataset_permission(dataset, current_user)
@@ -67,17 +67,17 @@ def get_document(self, dataset_id: str, document_id: str) -> Document:
document = DocumentService.get_document(dataset_id, document_id)
if not document:
- raise NotFound('Document not found.')
+ raise NotFound("Document not found.")
if document.tenant_id != current_user.current_tenant_id:
- raise Forbidden('No permission.')
+ raise Forbidden("No permission.")
return document
def get_batch_documents(self, dataset_id: str, batch: str) -> list[Document]:
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
- raise NotFound('Dataset not found.')
+ raise NotFound("Dataset not found.")
try:
DatasetService.check_dataset_permission(dataset, current_user)
@@ -87,7 +87,7 @@ def get_batch_documents(self, dataset_id: str, batch: str) -> list[Document]:
documents = DocumentService.get_batch_documents(dataset_id, batch)
if not documents:
- raise NotFound('Documents not found.')
+ raise NotFound("Documents not found.")
return documents
@@ -99,11 +99,11 @@ class GetProcessRuleApi(Resource):
def get(self):
req_data = request.args
- document_id = req_data.get('document_id')
+ document_id = req_data.get("document_id")
# get default rules
- mode = DocumentService.DEFAULT_RULES['mode']
- rules = DocumentService.DEFAULT_RULES['rules']
+ mode = DocumentService.DEFAULT_RULES["mode"]
+ rules = DocumentService.DEFAULT_RULES["rules"]
if document_id:
# get the latest process rule
document = Document.query.get_or_404(document_id)
@@ -111,7 +111,7 @@ def get(self):
dataset = DatasetService.get_dataset(document.dataset_id)
if not dataset:
- raise NotFound('Dataset not found.')
+ raise NotFound("Dataset not found.")
try:
DatasetService.check_dataset_permission(dataset, current_user)
@@ -119,19 +119,18 @@ def get(self):
raise Forbidden(str(e))
# get the latest process rule
- dataset_process_rule = db.session.query(DatasetProcessRule). \
- filter(DatasetProcessRule.dataset_id == document.dataset_id). \
- order_by(DatasetProcessRule.created_at.desc()). \
- limit(1). \
- one_or_none()
+ dataset_process_rule = (
+ db.session.query(DatasetProcessRule)
+ .filter(DatasetProcessRule.dataset_id == document.dataset_id)
+ .order_by(DatasetProcessRule.created_at.desc())
+ .limit(1)
+ .one_or_none()
+ )
if dataset_process_rule:
mode = dataset_process_rule.mode
rules = dataset_process_rule.rules_dict
- return {
- 'mode': mode,
- 'rules': rules
- }
+ return {"mode": mode, "rules": rules}
class DatasetDocumentListApi(Resource):
@@ -140,92 +139,99 @@ class DatasetDocumentListApi(Resource):
@account_initialization_required
def get(self, dataset_id):
dataset_id = str(dataset_id)
- page = request.args.get('page', default=1, type=int)
- limit = request.args.get('limit', default=20, type=int)
- search = request.args.get('keyword', default=None, type=str)
- sort = request.args.get('sort', default='-created_at', type=str)
+ page = request.args.get("page", default=1, type=int)
+ limit = request.args.get("limit", default=20, type=int)
+ search = request.args.get("keyword", default=None, type=str)
+ sort = request.args.get("sort", default="-created_at", type=str)
# "yes", "true", "t", "y", "1" convert to True, while others convert to False.
try:
- fetch = string_to_bool(request.args.get('fetch', default='false'))
+ fetch = string_to_bool(request.args.get("fetch", default="false"))
except (ArgumentTypeError, ValueError, Exception) as e:
fetch = False
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
- raise NotFound('Dataset not found.')
+ raise NotFound("Dataset not found.")
try:
DatasetService.check_dataset_permission(dataset, current_user)
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
- query = Document.query.filter_by(
- dataset_id=str(dataset_id), tenant_id=current_user.current_tenant_id)
+ query = Document.query.filter_by(dataset_id=str(dataset_id), tenant_id=current_user.current_tenant_id)
if search:
- search = f'%{search}%'
+ search = f"%{search}%"
query = query.filter(Document.name.like(search))
- if sort.startswith('-'):
+ if sort.startswith("-"):
sort_logic = desc
sort = sort[1:]
else:
sort_logic = asc
- if sort == 'hit_count':
- sub_query = db.select(DocumentSegment.document_id,
- db.func.sum(DocumentSegment.hit_count).label("total_hit_count")) \
- .group_by(DocumentSegment.document_id) \
+ if sort == "hit_count":
+ sub_query = (
+ db.select(DocumentSegment.document_id, db.func.sum(DocumentSegment.hit_count).label("total_hit_count"))
+ .group_by(DocumentSegment.document_id)
.subquery()
+ )
- query = query.outerjoin(sub_query, sub_query.c.document_id == Document.id) \
- .order_by(sort_logic(db.func.coalesce(sub_query.c.total_hit_count, 0)))
- elif sort == 'created_at':
- query = query.order_by(sort_logic(Document.created_at))
+ query = query.outerjoin(sub_query, sub_query.c.document_id == Document.id).order_by(
+ sort_logic(db.func.coalesce(sub_query.c.total_hit_count, 0)),
+ sort_logic(Document.position),
+ )
+ elif sort == "created_at":
+ query = query.order_by(
+ sort_logic(Document.created_at),
+ sort_logic(Document.position),
+ )
else:
- query = query.order_by(desc(Document.created_at))
+ query = query.order_by(
+ desc(Document.created_at),
+ desc(Document.position),
+ )
- paginated_documents = query.paginate(
- page=page, per_page=limit, max_per_page=100, error_out=False)
+ paginated_documents = query.paginate(page=page, per_page=limit, max_per_page=100, error_out=False)
documents = paginated_documents.items
if fetch:
for document in documents:
- completed_segments = DocumentSegment.query.filter(DocumentSegment.completed_at.isnot(None),
- DocumentSegment.document_id == str(document.id),
- DocumentSegment.status != 're_segment').count()
- total_segments = DocumentSegment.query.filter(DocumentSegment.document_id == str(document.id),
- DocumentSegment.status != 're_segment').count()
+ completed_segments = DocumentSegment.query.filter(
+ DocumentSegment.completed_at.isnot(None),
+ DocumentSegment.document_id == str(document.id),
+ DocumentSegment.status != "re_segment",
+ ).count()
+ total_segments = DocumentSegment.query.filter(
+ DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment"
+ ).count()
document.completed_segments = completed_segments
document.total_segments = total_segments
data = marshal(documents, document_with_segments_fields)
else:
data = marshal(documents, document_fields)
response = {
- 'data': data,
- 'has_more': len(documents) == limit,
- 'limit': limit,
- 'total': paginated_documents.total,
- 'page': page
+ "data": data,
+ "has_more": len(documents) == limit,
+ "limit": limit,
+ "total": paginated_documents.total,
+ "page": page,
}
return response
- documents_and_batch_fields = {
- 'documents': fields.List(fields.Nested(document_fields)),
- 'batch': fields.String
- }
+ documents_and_batch_fields = {"documents": fields.List(fields.Nested(document_fields)), "batch": fields.String}
@setup_required
@login_required
@account_initialization_required
@marshal_with(documents_and_batch_fields)
- @cloud_edition_billing_resource_check('vector_space')
+ @cloud_edition_billing_resource_check("vector_space")
def post(self, dataset_id):
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
- raise NotFound('Dataset not found.')
+ raise NotFound("Dataset not found.")
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_dataset_editor:
@@ -237,21 +243,22 @@ def post(self, dataset_id):
raise Forbidden(str(e))
parser = reqparse.RequestParser()
- parser.add_argument('indexing_technique', type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False,
- location='json')
- parser.add_argument('data_source', type=dict, required=False, location='json')
- parser.add_argument('process_rule', type=dict, required=False, location='json')
- parser.add_argument('duplicate', type=bool, default=True, nullable=False, location='json')
- parser.add_argument('original_document_id', type=str, required=False, location='json')
- parser.add_argument('doc_form', type=str, default='text_model', required=False, nullable=False, location='json')
- parser.add_argument('doc_language', type=str, default='English', required=False, nullable=False,
- location='json')
- parser.add_argument('retrieval_model', type=dict, required=False, nullable=False,
- location='json')
+ parser.add_argument(
+ "indexing_technique", type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False, location="json"
+ )
+ parser.add_argument("data_source", type=dict, required=False, location="json")
+ parser.add_argument("process_rule", type=dict, required=False, location="json")
+ parser.add_argument("duplicate", type=bool, default=True, nullable=False, location="json")
+ parser.add_argument("original_document_id", type=str, required=False, location="json")
+ parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
+ parser.add_argument(
+ "doc_language", type=str, default="English", required=False, nullable=False, location="json"
+ )
+ parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json")
args = parser.parse_args()
- if not dataset.indexing_technique and not args['indexing_technique']:
- raise ValueError('indexing_technique is required.')
+ if not dataset.indexing_technique and not args["indexing_technique"]:
+ raise ValueError("indexing_technique is required.")
# validate args
DocumentService.document_create_args_validate(args)
@@ -265,51 +272,53 @@ def post(self, dataset_id):
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()
- return {
- 'documents': documents,
- 'batch': batch
- }
+ return {"documents": documents, "batch": batch}
class DatasetInitApi(Resource):
-
@setup_required
@login_required
@account_initialization_required
@marshal_with(dataset_and_document_fields)
- @cloud_edition_billing_resource_check('vector_space')
+ @cloud_edition_billing_resource_check("vector_space")
def post(self):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
- parser.add_argument('indexing_technique', type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, required=True,
- nullable=False, location='json')
- parser.add_argument('data_source', type=dict, required=True, nullable=True, location='json')
- parser.add_argument('process_rule', type=dict, required=True, nullable=True, location='json')
- parser.add_argument('doc_form', type=str, default='text_model', required=False, nullable=False, location='json')
- parser.add_argument('doc_language', type=str, default='English', required=False, nullable=False,
- location='json')
- parser.add_argument('retrieval_model', type=dict, required=False, nullable=False,
- location='json')
+ parser.add_argument(
+ "indexing_technique",
+ type=str,
+ choices=Dataset.INDEXING_TECHNIQUE_LIST,
+ required=True,
+ nullable=False,
+ location="json",
+ )
+ parser.add_argument("data_source", type=dict, required=True, nullable=True, location="json")
+ parser.add_argument("process_rule", type=dict, required=True, nullable=True, location="json")
+ parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
+ parser.add_argument(
+ "doc_language", type=str, default="English", required=False, nullable=False, location="json"
+ )
+ parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json")
args = parser.parse_args()
# The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
if not current_user.is_dataset_editor:
raise Forbidden()
- if args['indexing_technique'] == 'high_quality':
+ if args["indexing_technique"] == "high_quality":
try:
model_manager = ModelManager()
model_manager.get_default_model_instance(
- tenant_id=current_user.current_tenant_id,
- model_type=ModelType.TEXT_EMBEDDING
+ tenant_id=current_user.current_tenant_id, model_type=ModelType.TEXT_EMBEDDING
)
except InvokeAuthorizationError:
raise ProviderNotInitializeError(
"No Embedding Model available. Please configure a valid provider "
- "in the Settings -> Model Provider.")
+ "in the Settings -> Model Provider."
+ )
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
@@ -318,9 +327,7 @@ def post(self):
try:
dataset, documents, batch = DocumentService.save_document_without_dataset_id(
- tenant_id=current_user.current_tenant_id,
- document_data=args,
- account=current_user
+ tenant_id=current_user.current_tenant_id, document_data=args, account=current_user
)
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
@@ -329,17 +336,12 @@ def post(self):
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()
- response = {
- 'dataset': dataset,
- 'documents': documents,
- 'batch': batch
- }
+ response = {"dataset": dataset, "documents": documents, "batch": batch}
return response
class DocumentIndexingEstimateApi(DocumentResource):
-
@setup_required
@login_required
@account_initialization_required
@@ -348,50 +350,49 @@ def get(self, dataset_id, document_id):
document_id = str(document_id)
document = self.get_document(dataset_id, document_id)
- if document.indexing_status in ['completed', 'error']:
+ if document.indexing_status in ["completed", "error"]:
raise DocumentAlreadyFinishedError()
data_process_rule = document.dataset_process_rule
data_process_rule_dict = data_process_rule.to_dict()
- response = {
- "tokens": 0,
- "total_price": 0,
- "currency": "USD",
- "total_segments": 0,
- "preview": []
- }
+ response = {"tokens": 0, "total_price": 0, "currency": "USD", "total_segments": 0, "preview": []}
- if document.data_source_type == 'upload_file':
+ if document.data_source_type == "upload_file":
data_source_info = document.data_source_info_dict
- if data_source_info and 'upload_file_id' in data_source_info:
- file_id = data_source_info['upload_file_id']
+ if data_source_info and "upload_file_id" in data_source_info:
+ file_id = data_source_info["upload_file_id"]
- file = db.session.query(UploadFile).filter(
- UploadFile.tenant_id == document.tenant_id,
- UploadFile.id == file_id
- ).first()
+ file = (
+ db.session.query(UploadFile)
+ .filter(UploadFile.tenant_id == document.tenant_id, UploadFile.id == file_id)
+ .first()
+ )
# raise error if file not found
if not file:
- raise NotFound('File not found.')
+ raise NotFound("File not found.")
extract_setting = ExtractSetting(
- datasource_type="upload_file",
- upload_file=file,
- document_model=document.doc_form
+ datasource_type="upload_file", upload_file=file, document_model=document.doc_form
)
indexing_runner = IndexingRunner()
try:
- response = indexing_runner.indexing_estimate(current_user.current_tenant_id, [extract_setting],
- data_process_rule_dict, document.doc_form,
- 'English', dataset_id)
+ response = indexing_runner.indexing_estimate(
+ current_user.current_tenant_id,
+ [extract_setting],
+ data_process_rule_dict,
+ document.doc_form,
+ "English",
+ dataset_id,
+ )
except LLMBadRequestError:
raise ProviderNotInitializeError(
"No Embedding Model available. Please configure a valid provider "
- "in the Settings -> Model Provider.")
+ "in the Settings -> Model Provider."
+ )
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
except Exception as e:
@@ -401,7 +402,6 @@ def get(self, dataset_id, document_id):
class DocumentBatchIndexingEstimateApi(DocumentResource):
-
@setup_required
@login_required
@account_initialization_required
@@ -409,13 +409,7 @@ def get(self, dataset_id, batch):
dataset_id = str(dataset_id)
batch = str(batch)
documents = self.get_batch_documents(dataset_id, batch)
- response = {
- "tokens": 0,
- "total_price": 0,
- "currency": "USD",
- "total_segments": 0,
- "preview": []
- }
+ response = {"tokens": 0, "total_price": 0, "currency": "USD", "total_segments": 0, "preview": []}
if not documents:
return response
data_process_rule = documents[0].dataset_process_rule
@@ -423,82 +417,83 @@ def get(self, dataset_id, batch):
info_list = []
extract_settings = []
for document in documents:
- if document.indexing_status in ['completed', 'error']:
+ if document.indexing_status in ["completed", "error"]:
raise DocumentAlreadyFinishedError()
data_source_info = document.data_source_info_dict
# format document files info
- if data_source_info and 'upload_file_id' in data_source_info:
- file_id = data_source_info['upload_file_id']
+ if data_source_info and "upload_file_id" in data_source_info:
+ file_id = data_source_info["upload_file_id"]
info_list.append(file_id)
# format document notion info
- elif data_source_info and 'notion_workspace_id' in data_source_info and 'notion_page_id' in data_source_info:
+ elif (
+ data_source_info and "notion_workspace_id" in data_source_info and "notion_page_id" in data_source_info
+ ):
pages = []
- page = {
- 'page_id': data_source_info['notion_page_id'],
- 'type': data_source_info['type']
- }
+ page = {"page_id": data_source_info["notion_page_id"], "type": data_source_info["type"]}
pages.append(page)
- notion_info = {
- 'workspace_id': data_source_info['notion_workspace_id'],
- 'pages': pages
- }
+ notion_info = {"workspace_id": data_source_info["notion_workspace_id"], "pages": pages}
info_list.append(notion_info)
- if document.data_source_type == 'upload_file':
- file_id = data_source_info['upload_file_id']
- file_detail = db.session.query(UploadFile).filter(
- UploadFile.tenant_id == current_user.current_tenant_id,
- UploadFile.id == file_id
- ).first()
+ if document.data_source_type == "upload_file":
+ file_id = data_source_info["upload_file_id"]
+ file_detail = (
+ db.session.query(UploadFile)
+ .filter(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id == file_id)
+ .first()
+ )
if file_detail is None:
raise NotFound("File not found.")
extract_setting = ExtractSetting(
- datasource_type="upload_file",
- upload_file=file_detail,
- document_model=document.doc_form
+ datasource_type="upload_file", upload_file=file_detail, document_model=document.doc_form
)
extract_settings.append(extract_setting)
- elif document.data_source_type == 'notion_import':
+ elif document.data_source_type == "notion_import":
extract_setting = ExtractSetting(
datasource_type="notion_import",
notion_info={
- "notion_workspace_id": data_source_info['notion_workspace_id'],
- "notion_obj_id": data_source_info['notion_page_id'],
- "notion_page_type": data_source_info['type'],
- "tenant_id": current_user.current_tenant_id
+ "notion_workspace_id": data_source_info["notion_workspace_id"],
+ "notion_obj_id": data_source_info["notion_page_id"],
+ "notion_page_type": data_source_info["type"],
+ "tenant_id": current_user.current_tenant_id,
},
- document_model=document.doc_form
+ document_model=document.doc_form,
)
extract_settings.append(extract_setting)
- elif document.data_source_type == 'website_crawl':
+ elif document.data_source_type == "website_crawl":
extract_setting = ExtractSetting(
datasource_type="website_crawl",
website_info={
- "provider": data_source_info['provider'],
- "job_id": data_source_info['job_id'],
- "url": data_source_info['url'],
+ "provider": data_source_info["provider"],
+ "job_id": data_source_info["job_id"],
+ "url": data_source_info["url"],
"tenant_id": current_user.current_tenant_id,
- "mode": data_source_info['mode'],
- "only_main_content": data_source_info['only_main_content']
+ "mode": data_source_info["mode"],
+ "only_main_content": data_source_info["only_main_content"],
},
- document_model=document.doc_form
+ document_model=document.doc_form,
)
extract_settings.append(extract_setting)
else:
- raise ValueError('Data source type not support')
+ raise ValueError("Data source type not support")
indexing_runner = IndexingRunner()
try:
- response = indexing_runner.indexing_estimate(current_user.current_tenant_id, extract_settings,
- data_process_rule_dict, document.doc_form,
- 'English', dataset_id)
+ response = indexing_runner.indexing_estimate(
+ current_user.current_tenant_id,
+ extract_settings,
+ data_process_rule_dict,
+ document.doc_form,
+ "English",
+ dataset_id,
+ )
except LLMBadRequestError:
raise ProviderNotInitializeError(
"No Embedding Model available. Please configure a valid provider "
- "in the Settings -> Model Provider.")
+ "in the Settings -> Model Provider."
+ )
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
except Exception as e:
@@ -507,7 +502,6 @@ def get(self, dataset_id, batch):
class DocumentBatchIndexingStatusApi(DocumentResource):
-
@setup_required
@login_required
@account_initialization_required
@@ -517,24 +511,24 @@ def get(self, dataset_id, batch):
documents = self.get_batch_documents(dataset_id, batch)
documents_status = []
for document in documents:
- completed_segments = DocumentSegment.query.filter(DocumentSegment.completed_at.isnot(None),
- DocumentSegment.document_id == str(document.id),
- DocumentSegment.status != 're_segment').count()
- total_segments = DocumentSegment.query.filter(DocumentSegment.document_id == str(document.id),
- DocumentSegment.status != 're_segment').count()
+ completed_segments = DocumentSegment.query.filter(
+ DocumentSegment.completed_at.isnot(None),
+ DocumentSegment.document_id == str(document.id),
+ DocumentSegment.status != "re_segment",
+ ).count()
+ total_segments = DocumentSegment.query.filter(
+ DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment"
+ ).count()
document.completed_segments = completed_segments
document.total_segments = total_segments
if document.is_paused:
- document.indexing_status = 'paused'
+ document.indexing_status = "paused"
documents_status.append(marshal(document, document_status_fields))
- data = {
- 'data': documents_status
- }
+ data = {"data": documents_status}
return data
class DocumentIndexingStatusApi(DocumentResource):
-
@setup_required
@login_required
@account_initialization_required
@@ -543,25 +537,24 @@ def get(self, dataset_id, document_id):
document_id = str(document_id)
document = self.get_document(dataset_id, document_id)
- completed_segments = DocumentSegment.query \
- .filter(DocumentSegment.completed_at.isnot(None),
- DocumentSegment.document_id == str(document_id),
- DocumentSegment.status != 're_segment') \
- .count()
- total_segments = DocumentSegment.query \
- .filter(DocumentSegment.document_id == str(document_id),
- DocumentSegment.status != 're_segment') \
- .count()
+ completed_segments = DocumentSegment.query.filter(
+ DocumentSegment.completed_at.isnot(None),
+ DocumentSegment.document_id == str(document_id),
+ DocumentSegment.status != "re_segment",
+ ).count()
+ total_segments = DocumentSegment.query.filter(
+ DocumentSegment.document_id == str(document_id), DocumentSegment.status != "re_segment"
+ ).count()
document.completed_segments = completed_segments
document.total_segments = total_segments
if document.is_paused:
- document.indexing_status = 'paused'
+ document.indexing_status = "paused"
return marshal(document, document_status_fields)
class DocumentDetailApi(DocumentResource):
- METADATA_CHOICES = {'all', 'only', 'without'}
+ METADATA_CHOICES = {"all", "only", "without"}
@setup_required
@login_required
@@ -571,77 +564,75 @@ def get(self, dataset_id, document_id):
document_id = str(document_id)
document = self.get_document(dataset_id, document_id)
- metadata = request.args.get('metadata', 'all')
+ metadata = request.args.get("metadata", "all")
if metadata not in self.METADATA_CHOICES:
- raise InvalidMetadataError(f'Invalid metadata value: {metadata}')
+ raise InvalidMetadataError(f"Invalid metadata value: {metadata}")
- if metadata == 'only':
- response = {
- 'id': document.id,
- 'doc_type': document.doc_type,
- 'doc_metadata': document.doc_metadata
- }
- elif metadata == 'without':
+ if metadata == "only":
+ response = {"id": document.id, "doc_type": document.doc_type, "doc_metadata": document.doc_metadata}
+ elif metadata == "without":
process_rules = DatasetService.get_process_rules(dataset_id)
data_source_info = document.data_source_detail_dict
response = {
- 'id': document.id,
- 'position': document.position,
- 'data_source_type': document.data_source_type,
- 'data_source_info': data_source_info,
- 'dataset_process_rule_id': document.dataset_process_rule_id,
- 'dataset_process_rule': process_rules,
- 'name': document.name,
- 'created_from': document.created_from,
- 'created_by': document.created_by,
- 'created_at': document.created_at.timestamp(),
- 'tokens': document.tokens,
- 'indexing_status': document.indexing_status,
- 'completed_at': int(document.completed_at.timestamp()) if document.completed_at else None,
- 'updated_at': int(document.updated_at.timestamp()) if document.updated_at else None,
- 'indexing_latency': document.indexing_latency,
- 'error': document.error,
- 'enabled': document.enabled,
- 'disabled_at': int(document.disabled_at.timestamp()) if document.disabled_at else None,
- 'disabled_by': document.disabled_by,
- 'archived': document.archived,
- 'segment_count': document.segment_count,
- 'average_segment_length': document.average_segment_length,
- 'hit_count': document.hit_count,
- 'display_status': document.display_status,
- 'doc_form': document.doc_form
+ "id": document.id,
+ "position": document.position,
+ "data_source_type": document.data_source_type,
+ "data_source_info": data_source_info,
+ "dataset_process_rule_id": document.dataset_process_rule_id,
+ "dataset_process_rule": process_rules,
+ "name": document.name,
+ "created_from": document.created_from,
+ "created_by": document.created_by,
+ "created_at": document.created_at.timestamp(),
+ "tokens": document.tokens,
+ "indexing_status": document.indexing_status,
+ "completed_at": int(document.completed_at.timestamp()) if document.completed_at else None,
+ "updated_at": int(document.updated_at.timestamp()) if document.updated_at else None,
+ "indexing_latency": document.indexing_latency,
+ "error": document.error,
+ "enabled": document.enabled,
+ "disabled_at": int(document.disabled_at.timestamp()) if document.disabled_at else None,
+ "disabled_by": document.disabled_by,
+ "archived": document.archived,
+ "segment_count": document.segment_count,
+ "average_segment_length": document.average_segment_length,
+ "hit_count": document.hit_count,
+ "display_status": document.display_status,
+ "doc_form": document.doc_form,
+ "doc_language": document.doc_language,
}
else:
process_rules = DatasetService.get_process_rules(dataset_id)
data_source_info = document.data_source_detail_dict
response = {
- 'id': document.id,
- 'position': document.position,
- 'data_source_type': document.data_source_type,
- 'data_source_info': data_source_info,
- 'dataset_process_rule_id': document.dataset_process_rule_id,
- 'dataset_process_rule': process_rules,
- 'name': document.name,
- 'created_from': document.created_from,
- 'created_by': document.created_by,
- 'created_at': document.created_at.timestamp(),
- 'tokens': document.tokens,
- 'indexing_status': document.indexing_status,
- 'completed_at': int(document.completed_at.timestamp()) if document.completed_at else None,
- 'updated_at': int(document.updated_at.timestamp()) if document.updated_at else None,
- 'indexing_latency': document.indexing_latency,
- 'error': document.error,
- 'enabled': document.enabled,
- 'disabled_at': int(document.disabled_at.timestamp()) if document.disabled_at else None,
- 'disabled_by': document.disabled_by,
- 'archived': document.archived,
- 'doc_type': document.doc_type,
- 'doc_metadata': document.doc_metadata,
- 'segment_count': document.segment_count,
- 'average_segment_length': document.average_segment_length,
- 'hit_count': document.hit_count,
- 'display_status': document.display_status,
- 'doc_form': document.doc_form
+ "id": document.id,
+ "position": document.position,
+ "data_source_type": document.data_source_type,
+ "data_source_info": data_source_info,
+ "dataset_process_rule_id": document.dataset_process_rule_id,
+ "dataset_process_rule": process_rules,
+ "name": document.name,
+ "created_from": document.created_from,
+ "created_by": document.created_by,
+ "created_at": document.created_at.timestamp(),
+ "tokens": document.tokens,
+ "indexing_status": document.indexing_status,
+ "completed_at": int(document.completed_at.timestamp()) if document.completed_at else None,
+ "updated_at": int(document.updated_at.timestamp()) if document.updated_at else None,
+ "indexing_latency": document.indexing_latency,
+ "error": document.error,
+ "enabled": document.enabled,
+ "disabled_at": int(document.disabled_at.timestamp()) if document.disabled_at else None,
+ "disabled_by": document.disabled_by,
+ "archived": document.archived,
+ "doc_type": document.doc_type,
+ "doc_metadata": document.doc_metadata,
+ "segment_count": document.segment_count,
+ "average_segment_length": document.average_segment_length,
+ "hit_count": document.hit_count,
+ "display_status": document.display_status,
+ "doc_form": document.doc_form,
+ "doc_language": document.doc_language,
}
return response, 200
@@ -662,7 +653,7 @@ def patch(self, dataset_id, document_id, action):
if action == "pause":
if document.indexing_status != "indexing":
- raise InvalidActionError('Document not in indexing state.')
+ raise InvalidActionError("Document not in indexing state.")
document.paused_by = current_user.id
document.paused_at = datetime.now(timezone.utc).replace(tzinfo=None)
@@ -671,7 +662,7 @@ def patch(self, dataset_id, document_id, action):
elif action == "resume":
if document.indexing_status not in ["paused", "error"]:
- raise InvalidActionError('Document not in paused or error state.')
+ raise InvalidActionError("Document not in paused or error state.")
document.paused_by = None
document.paused_at = None
@@ -680,7 +671,7 @@ def patch(self, dataset_id, document_id, action):
else:
raise InvalidActionError()
- return {'result': 'success'}, 200
+ return {"result": "success"}, 200
class DocumentDeleteApi(DocumentResource):
@@ -701,9 +692,9 @@ def delete(self, dataset_id, document_id):
try:
DocumentService.delete_document(document)
except services.errors.document.DocumentIndexingError:
- raise DocumentIndexingError('Cannot delete document during indexing.')
+ raise DocumentIndexingError("Cannot delete document during indexing.")
- return {'result': 'success'}, 204
+ return {"result": "success"}, 204
class DocumentMetadataApi(DocumentResource):
@@ -717,26 +708,26 @@ def put(self, dataset_id, document_id):
req_data = request.get_json()
- doc_type = req_data.get('doc_type')
- doc_metadata = req_data.get('doc_metadata')
+ doc_type = req_data.get("doc_type")
+ doc_metadata = req_data.get("doc_metadata")
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
if doc_type is None or doc_metadata is None:
- raise ValueError('Both doc_type and doc_metadata must be provided.')
+ raise ValueError("Both doc_type and doc_metadata must be provided.")
if doc_type not in DocumentService.DOCUMENT_METADATA_SCHEMA:
- raise ValueError('Invalid doc_type.')
+ raise ValueError("Invalid doc_type.")
if not isinstance(doc_metadata, dict):
- raise ValueError('doc_metadata must be a dictionary.')
+ raise ValueError("doc_metadata must be a dictionary.")
metadata_schema = DocumentService.DOCUMENT_METADATA_SCHEMA[doc_type]
document.doc_metadata = {}
- if doc_type == 'others':
+ if doc_type == "others":
document.doc_metadata = doc_metadata
else:
for key, value_type in metadata_schema.items():
@@ -748,14 +739,14 @@ def put(self, dataset_id, document_id):
document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
db.session.commit()
- return {'result': 'success', 'message': 'Document metadata updated.'}, 200
+ return {"result": "success", "message": "Document metadata updated."}, 200
class DocumentStatusApi(DocumentResource):
@setup_required
@login_required
@account_initialization_required
- @cloud_edition_billing_resource_check('vector_space')
+ @cloud_edition_billing_resource_check("vector_space")
def patch(self, dataset_id, document_id, action):
dataset_id = str(dataset_id)
document_id = str(document_id)
@@ -775,14 +766,14 @@ def patch(self, dataset_id, document_id, action):
document = self.get_document(dataset_id, document_id)
- indexing_cache_key = 'document_{}_indexing'.format(document.id)
+ indexing_cache_key = "document_{}_indexing".format(document.id)
cache_result = redis_client.get(indexing_cache_key)
if cache_result is not None:
raise InvalidActionError("Document is being indexed, please try again later")
if action == "enable":
if document.enabled:
- raise InvalidActionError('Document already enabled.')
+ raise InvalidActionError("Document already enabled.")
document.enabled = True
document.disabled_at = None
@@ -795,13 +786,13 @@ def patch(self, dataset_id, document_id, action):
add_document_to_index_task.delay(document_id)
- return {'result': 'success'}, 200
+ return {"result": "success"}, 200
elif action == "disable":
- if not document.completed_at or document.indexing_status != 'completed':
- raise InvalidActionError('Document is not completed.')
+ if not document.completed_at or document.indexing_status != "completed":
+ raise InvalidActionError("Document is not completed.")
if not document.enabled:
- raise InvalidActionError('Document already disabled.')
+ raise InvalidActionError("Document already disabled.")
document.enabled = False
document.disabled_at = datetime.now(timezone.utc).replace(tzinfo=None)
@@ -814,11 +805,11 @@ def patch(self, dataset_id, document_id, action):
remove_document_from_index_task.delay(document_id)
- return {'result': 'success'}, 200
+ return {"result": "success"}, 200
elif action == "archive":
if document.archived:
- raise InvalidActionError('Document already archived.')
+ raise InvalidActionError("Document already archived.")
document.archived = True
document.archived_at = datetime.now(timezone.utc).replace(tzinfo=None)
@@ -832,10 +823,10 @@ def patch(self, dataset_id, document_id, action):
remove_document_from_index_task.delay(document_id)
- return {'result': 'success'}, 200
+ return {"result": "success"}, 200
elif action == "un_archive":
if not document.archived:
- raise InvalidActionError('Document is not archived.')
+ raise InvalidActionError("Document is not archived.")
document.archived = False
document.archived_at = None
@@ -848,13 +839,12 @@ def patch(self, dataset_id, document_id, action):
add_document_to_index_task.delay(document_id)
- return {'result': 'success'}, 200
+ return {"result": "success"}, 200
else:
raise InvalidActionError()
class DocumentPauseApi(DocumentResource):
-
@setup_required
@login_required
@account_initialization_required
@@ -865,7 +855,7 @@ def patch(self, dataset_id, document_id):
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
- raise NotFound('Dataset not found.')
+ raise NotFound("Dataset not found.")
document = DocumentService.get_document(dataset.id, document_id)
@@ -881,9 +871,9 @@ def patch(self, dataset_id, document_id):
# pause document
DocumentService.pause_document(document)
except services.errors.document.DocumentIndexingError:
- raise DocumentIndexingError('Cannot pause completed document.')
+ raise DocumentIndexingError("Cannot pause completed document.")
- return {'result': 'success'}, 204
+ return {"result": "success"}, 204
class DocumentRecoverApi(DocumentResource):
@@ -896,7 +886,7 @@ def patch(self, dataset_id, document_id):
document_id = str(document_id)
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
- raise NotFound('Dataset not found.')
+ raise NotFound("Dataset not found.")
document = DocumentService.get_document(dataset.id, document_id)
# 404 if document not found
@@ -910,9 +900,9 @@ def patch(self, dataset_id, document_id):
# pause document
DocumentService.recover_document(document)
except services.errors.document.DocumentIndexingError:
- raise DocumentIndexingError('Document is not in paused status.')
+ raise DocumentIndexingError("Document is not in paused status.")
- return {'result': 'success'}, 204
+ return {"result": "success"}, 204
class DocumentRetryApi(DocumentResource):
@@ -923,15 +913,14 @@ def post(self, dataset_id):
"""retry document."""
parser = reqparse.RequestParser()
- parser.add_argument('document_ids', type=list, required=True, nullable=False,
- location='json')
+ parser.add_argument("document_ids", type=list, required=True, nullable=False, location="json")
args = parser.parse_args()
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
retry_documents = []
if not dataset:
- raise NotFound('Dataset not found.')
- for document_id in args['document_ids']:
+ raise NotFound("Dataset not found.")
+ for document_id in args["document_ids"]:
try:
document_id = str(document_id)
@@ -946,7 +935,7 @@ def post(self, dataset_id):
raise ArchivedDocumentImmutableError()
# 400 if document is completed
- if document.indexing_status == 'completed':
+ if document.indexing_status == "completed":
raise DocumentAlreadyFinishedError()
retry_documents.append(document)
except Exception as e:
@@ -955,7 +944,7 @@ def post(self, dataset_id):
# retry document
DocumentService.retry_document(dataset_id, retry_documents)
- return {'result': 'success'}, 204
+ return {"result": "success"}, 204
class DocumentRenameApi(DocumentResource):
@@ -970,13 +959,13 @@ def post(self, dataset_id, document_id):
dataset = DatasetService.get_dataset(dataset_id)
DatasetService.check_dataset_operator_permission(current_user, dataset)
parser = reqparse.RequestParser()
- parser.add_argument('name', type=str, required=True, nullable=False, location='json')
+ parser.add_argument("name", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
try:
- document = DocumentService.rename_document(dataset_id, document_id, args['name'])
+ document = DocumentService.rename_document(dataset_id, document_id, args["name"])
except services.errors.document.DocumentIndexingError:
- raise DocumentIndexingError('Cannot delete document during indexing.')
+ raise DocumentIndexingError("Cannot delete document during indexing.")
return document
@@ -990,51 +979,43 @@ def get(self, dataset_id, document_id):
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
if not dataset:
- raise NotFound('Dataset not found.')
+ raise NotFound("Dataset not found.")
document_id = str(document_id)
document = DocumentService.get_document(dataset.id, document_id)
if not document:
- raise NotFound('Document not found.')
+ raise NotFound("Document not found.")
if document.tenant_id != current_user.current_tenant_id:
- raise Forbidden('No permission.')
- if document.data_source_type != 'website_crawl':
- raise ValueError('Document is not a website document.')
+ raise Forbidden("No permission.")
+ if document.data_source_type != "website_crawl":
+ raise ValueError("Document is not a website document.")
# 403 if document is archived
if DocumentService.check_archived(document):
raise ArchivedDocumentImmutableError()
# sync document
DocumentService.sync_website_document(dataset_id, document)
- return {'result': 'success'}, 200
-
-
-api.add_resource(GetProcessRuleApi, '/datasets/process-rule')
-api.add_resource(DatasetDocumentListApi,
- '/datasets/<uuid:dataset_id>/documents')
-api.add_resource(DatasetInitApi,
- '/datasets/init')
-api.add_resource(DocumentIndexingEstimateApi,
- '/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-estimate')
-api.add_resource(DocumentBatchIndexingEstimateApi,
- '/datasets/