diff --git a/.github/workflows/backend_check.yml b/.github/workflows/backend_check.yml
new file mode 100644
index 00000000..42606fae
--- /dev/null
+++ b/.github/workflows/backend_check.yml
@@ -0,0 +1,76 @@
+name: check backend
+
+on:
+  push:
+    branches:
+      - dev
+      - master
+  pull_request:
+    branches:
+      - dev
+
+jobs:
+  build_backend_and_check_codestyle:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: set up python 3.10
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.10'
+
+      - name: setup poetry and install dependencies
+        run: |
+          python -m pip install --upgrade pip poetry
+          python -m poetry lock --no-update
+          python -m poetry install --with lint --no-interaction
+        working-directory: backend/df_designer
+
+      - name: run black codestyle
+        run: |
+          python -m poetry run black --line-length=120 --check .
+        working-directory: backend/df_designer
+
+      - name: run flake8 codestyle
+        run: |
+          python -m poetry run flake8 --max-line-length 120 --ignore=E203 .
+        working-directory: backend/df_designer
+
+      - name: run isort codestyle
+        run: |
+          python -m poetry run isort --line-length=120 --diff .
+        working-directory: backend/df_designer
+
+  test_backend:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: set up python 3.10
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.10'
+
+      - name: setup poetry and install dependencies
+        run: |
+          python -m pip install --upgrade pip poetry
+          python -m poetry lock --no-update
+          python -m poetry install --with lint --no-interaction
+        working-directory: backend/df_designer
+
+      - name: build wheel
+        run: |
+          python -m poetry build
+        working-directory: backend/df_designer
+
+      - name: Create new project
+        run: |
+          python -m poetry run dflowd init --destination ../../ --no-input --overwrite-if-exists
+        working-directory: backend/df_designer
+
+      - name: run tests
+        run: |
+          python -m poetry install
+          python -m poetry run pytest ../backend/df_designer/app/tests/ --verbose
+        working-directory: df_designer_project
diff --git a/.github/workflows/build_and_upload_release.yml b/.github/workflows/build_and_upload_release.yml
new file mode 100644
index 00000000..4a509c24
--- /dev/null
+++ b/.github/workflows/build_and_upload_release.yml
@@ -0,0 +1,45 @@
+name: build_and_upload_release
+
+on:
+  release:
+    types:
+      - published
+
+jobs:
+  build:
+    name: build and upload release
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: set up python
+        uses: actions/setup-python@v5
+
+      - name: setup poetry
+        run: |
+          python -m pip install --upgrade pip poetry
+
+      - name: build wheels and test uploading to pypi
+        if: startsWith(github.ref, 'refs/tags/v') != true
+        run: |
+          python -m poetry publish --build --dry-run
+        working-directory: backend/df_designer
+
+      - name: build wheels and upload to pypi
+        if: startsWith(github.ref, 'refs/tags/v')
+        env:
+          POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }}
+        run: |
+          python -m poetry publish --build
+        working-directory: backend/df_designer
+
+      - name: upload binaries into release
+        if: startsWith(github.ref, 'refs/tags/v')
+        uses: svenstaro/upload-release-action@v2
+        with:
+          repo_token: ${{ secrets.GITHUB_TOKEN }}
+          file: backend/df_designer/dist/*
+          tag: ${{ github.ref }}
+          overwrite: true
+          file_glob: true
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
deleted file mode 100644
index 2c00ffff..00000000
--- a/.github/workflows/ci.yml
+++ /dev/null
@@ -1,28 +0,0 @@
-name: Code checking.
-on: - push: - branches: [feat/backend, dev] - pull_request: - branches: [dev] - -jobs: - check: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.10", "3.11", "3.12"] - steps: - - uses: actions/checkout@v4 - - name: tests - uses: actions/setup-python@v4 - with: - python-version: ${{matrix.python-version}} - - name: install dependencies - run: | - python -m pip install --upgrade pip - pipx install poetry - poetry install - - name: tests - run: poetry run python -m pytest - - diff --git a/.github/workflows/docker_check.yml b/.github/workflows/docker_check.yml new file mode 100644 index 00000000..ac7c5795 --- /dev/null +++ b/.github/workflows/docker_check.yml @@ -0,0 +1,33 @@ +name: Build Docker Images + +on: + push: + branches: + - dev + - master + pull_request: + branches: + - dev + +jobs: + build_images: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: setup poetry and install dependencies + run: | + python -m pip install --upgrade pip poetry + python -m poetry lock --no-update + python -m poetry install --with lint --no-ansi --no-interaction + working-directory: backend/df_designer + + - name: Create new project + run: python -m poetry run dflowd init --destination ../../ --no-input --overwrite-if-exists + working-directory: backend/df_designer + + - name: Build Frontend + run: docker build -f Dockerfile --build-arg PROJECT_DIR=df_designer_project --target=frontend-builder . + + - name: Build backend & run app + run: docker build -f Dockerfile --build-arg PROJECT_DIR=df_designer_project --target=runtime . diff --git a/.github/workflows/e2e_test.yml b/.github/workflows/e2e_test.yml new file mode 100644 index 00000000..8bb5850c --- /dev/null +++ b/.github/workflows/e2e_test.yml @@ -0,0 +1,140 @@ +name: test app + +on: + push: + branches: + - dev + - master + pull_request: + branches: + - dev + +jobs: + + build_frontend: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Install bun + run: npm install -g bun + + - name: Install frontend dependencies + run: bun install + working-directory: frontend + + - name: Build frontend + run: bun run build + working-directory: frontend + + - run: touch file1.txt + + - name: Archive frontend dist + uses: actions/upload-artifact@v4 + with: + name: frontend-dist + path: frontend/dist + + + build_backend: + runs-on: ubuntu-latest + needs: build_frontend + steps: + - uses: actions/checkout@v4 + + - name: Create dist directory + run: mkdir -p frontend/dist + + - name: Download frontend dist + uses: actions/download-artifact@v4 + with: + name: frontend-dist + path: frontend/dist + + - name: copy static files + run: | + cp -r frontend/dist/. 
backend/df_designer/app/static/ + + - name: set up python 3.10 + uses: actions/setup-python@v5 + with: + python-version: '3.10' + + - name: setup poetry and install dependencies + run: | + python -m pip install --upgrade pip poetry + python -m poetry lock --no-update + python -m poetry install --with lint --no-interaction + working-directory: backend/df_designer + + - name: build wheel + run: python -m poetry build + working-directory: backend/df_designer + + - name: Archive backend dist + uses: actions/upload-artifact@v4 + with: + name: backend-dist + path: backend/df_designer/dist + + + run_app: + runs-on: ubuntu-latest + needs: build_backend + steps: + - uses: actions/checkout@v4 + + - name: set up python 3.10 + uses: actions/setup-python@v5 + with: + python-version: '3.10' + + - name: setup dflowd poetry and install dependencies + run: | + python -m pip install --upgrade pip poetry + python -m poetry lock --no-update + python -m poetry install --with lint --no-interaction + working-directory: backend/df_designer + + - name: Create new project + run: | + python -m poetry run dflowd init --destination ../../ --no-input --overwrite-if-exists + working-directory: backend/df_designer + + - name: Create dist directory + run: mkdir -p backend/df_designer/dist + + - name: Download backend dist + uses: actions/download-artifact@v4 + with: + name: backend-dist + path: backend/df_designer/dist + + - name: setup project poetry and install dependencies + run: | + python -m pip install --upgrade pip poetry + python -m poetry lock --no-update + python -m poetry install --no-interaction + working-directory: df_designer_project + + - name: Run back & front + run: | + python -m poetry run dflowd run_backend & + sleep 10 + working-directory: df_designer_project + + - name: Install bun + run: npm install -g bun + + - name: Install frontend dependencies + run: bun install + working-directory: frontend + + - name: Cypress run + uses: cypress-io/github-action@v6 + with: + install-command: bun add cypress + command: bun run e2e:chrome + working-directory: frontend + browser: chrome diff --git a/.github/workflows/frontend_check.yml b/.github/workflows/frontend_check.yml new file mode 100644 index 00000000..8be0b0a2 --- /dev/null +++ b/.github/workflows/frontend_check.yml @@ -0,0 +1,16 @@ +# name: front check + +# on: [push, pull_request] + +# jobs: +# build_and_check_frontend: +# runs-on: ubuntu-latest +# steps: +# - uses: actions/checkout@v4 + +# - name: Install bun +# run: npm install -g bun + +# - name: Install dependencies +# run: bun install +# working-directory: frontend diff --git a/.gitignore b/.gitignore index e1214a3d..7752fb9b 100644 --- a/.gitignore +++ b/.gitignore @@ -251,4 +251,9 @@ cython_debug/ ./flows.json *.sqlite -my_project \ No newline at end of file +my_project +/test-results/ +/playwright-report/ +/blob-report/ +/playwright/.cache/ +df_designer_project diff --git a/Dockerfile b/Dockerfile index 71cbee0f..e21f9846 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,28 +1,26 @@ FROM oven/bun:1 as frontend-base FROM frontend-base AS frontend-builder -WORKDIR /src -COPY ./frontend/package.json /src/frontend/package.json -COPY ./frontend/bun.lockb /src/frontend/bun.lockb +WORKDIR /temp +COPY ./frontend/package.json /temp/frontend/package.json +COPY ./frontend/bun.lockb /temp/frontend/bun.lockb -RUN cd /src/frontend && bun install --frozen-lockfile +RUN cd /temp/frontend && bun install --frozen-lockfile # Copy the rest of the application code -COPY ./frontend/ /src/frontend/ -WORKDIR 
/src/frontend/
+COPY ./frontend/ /temp/frontend/
+WORKDIR /temp/frontend/
 
 RUN bun run build
-RUN ls /src/frontend/dist
 
 #---------------------------------------------------------
 # Use a slim variant to reduce image size where possible
 FROM python:3.10-slim as backend-builder
 
-WORKDIR /src
+WORKDIR /temp
 
 ARG PROJECT_DIR
-# ENV PROJECT_DIR ${PROJECT_DIR}
 
 ENV POETRY_VERSION=1.8.2 \
     POETRY_HOME=/poetry \
@@ -35,13 +33,11 @@ RUN python3 -m venv $POETRY_VENV \
 
 ENV PATH="${PATH}:${POETRY_VENV}/bin"
 
-COPY ./backend/df_designer /src/backend/df_designer
-COPY --from=frontend-builder /src/frontend/dist /src/backend/df_designer/app/static
-
-COPY ./${PROJECT_DIR} /src/${PROJECT_DIR}
+COPY ./backend/df_designer /temp/backend/df_designer
+COPY --from=frontend-builder /temp/frontend/dist /temp/backend/df_designer/app/static
 
 # Build the wheel
-WORKDIR /src/backend/df_designer
+WORKDIR /temp/backend/df_designer
 RUN poetry build
 
 #---------------------------------------------------------
@@ -52,21 +48,23 @@ FROM python:3.10-slim as runtime
 
 ARG PROJECT_DIR
 
+# Install Git
+RUN apt-get update && apt-get install -y git
+
+# Set the GIT_PYTHON_GIT_EXECUTABLE environment variable
+ENV GIT_PYTHON_GIT_EXECUTABLE=/usr/bin/git
+
 COPY --from=backend-builder /poetry-venv /poetry-venv
 
 # Set environment variable to use the virtualenv
 ENV PATH="/poetry-venv/bin:$PATH"
 
 # Copy only the necessary files
-COPY --from=backend-builder /src/backend/df_designer /df_designer
-COPY ./${PROJECT_DIR} /${PROJECT_DIR}
+COPY --from=backend-builder /temp/backend/df_designer /src2/backend/df_designer
+COPY ./${PROJECT_DIR} /src2/project_dir
 
 # Install the wheel
-WORKDIR /${PROJECT_DIR}
-RUN poetry lock --no-update \
-    && poetry install
+WORKDIR /src2/project_dir
+RUN poetry install
 
 CMD ["poetry", "run", "dflowd", "run_backend"]
-
-
-# #TODO: change scr to app (maybe)
\ No newline at end of file
diff --git a/Makefile b/Makefile
index e69de29b..c6380440 100644
--- a/Makefile
+++ b/Makefile
@@ -0,0 +1,138 @@
+SHELL = /bin/bash
+
+PYTHON = python3
+FRONTEND_DIR = ./frontend
+BACKEND_DIR = ./backend/df_designer
+
+
+.PHONY: help
+help:
+	@echo "Usage: make <target>"
+	@echo ""
+	@echo "Available targets:"
+	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf " %-20s : %s\n", $$1, $$2}' | grep -v 'make .* &&'
+
+	@echo ""
+	@echo "These targets are for internal use only:"
+	@echo "check_project_arg, "
+
+
+# frontend cmds
+.PHONY: install_frontend_env
+install_frontend_env: ## Installs frontend dependencies using bun
+	cd ${FRONTEND_DIR} && bun install
+
+.PHONY: clean_frontend_env
+clean_frontend_env: ## Removes node_modules directory
+	cd ${FRONTEND_DIR} && rm -rf node_modules
+
+.PHONY: build_frontend
+build_frontend: install_frontend_env ## Builds frontend
+	cd ${FRONTEND_DIR} && bun run build
+
+.PHONY: run_frontend
+run_frontend: install_frontend_env ## Runs the frontend dev server (same as run_dev_frontend)
+	cd ${FRONTEND_DIR} && bun run dev
+
+.PHONY: run_dev_frontend
+run_dev_frontend: install_frontend_env ## Runs frontend in dev mode
+	make run_frontend
+
+
+# backend cmds
+.PHONY: install_backend_env
+install_backend_env: ## Installs backend dependencies using poetry
+	cd ${BACKEND_DIR} && poetry install
+
+.PHONY: clean_backend_env
+clean_backend_env: ## Removes backend dependencies using poetry
+	cd ${BACKEND_DIR} && poetry env remove --all
+
+.PHONY: build_backend
+build_backend: install_backend_env ## Builds the backend wheel
+	cd ${BACKEND_DIR} && poetry build
+
+.PHONY: check_project_arg
+check_project_arg:
@if [ -z "$(PROJECT_NAME)" ]; then \ + echo "PROJECT_NAME is not defined. Please pass PROJECT_NAME= with the command."; \ + exit 1; \ + fi + +.PHONY: run_backend +run_backend: check_project_arg ## Runs backend using the built dist. NEEDS arg: PROJECT_NAME + cd ${PROJECT_NAME} && poetry install && poetry run dflowd run_backend --conf-reload="False" + +.PHONY: run_dev_backend +run_dev_backend: check_project_arg install_backend_env ## Runs backend in dev mode. NEEDS arg: PROJECT_NAME + cd ${BACKEND_DIR} && poetry run dflowd run_backend --project-dir ../../${PROJECT_NAME} + +# backend tests +.PHONY: unit_tests +unit_tests: install_backend_env ## Runs all backend unit tests + cd ${BACKEND_DIR} && poetry run pytest ./app/tests/api + cd ${BACKEND_DIR} && poetry run pytest ./app/tests/services + +.PHONY: integration_tests +integration_tests: build_backend ## Runs all backend integration tests + if [ ! -d "./df_designer_project" ]; then \ + cd "${BACKEND_DIR}" && \ + poetry run dflowd init --destination ../../ --no-input --overwrite-if-exists; \ + fi + + cd df_designer_project && \ + poetry install && \ + poetry run pytest ../${BACKEND_DIR}/app/tests/integration + + +.PHONY: backend_e2e_test +backend_e2e_test: build_backend ## Runs e2e backend test + if [ ! -d "./df_designer_project" ]; then \ + cd "${BACKEND_DIR}" && \ + poetry run dflowd init --destination ../../ --no-input --overwrite-if-exists; \ + fi + + cd df_designer_project && \ + poetry install && \ + poetry run pytest ../${BACKEND_DIR}/app/tests/e2e + + +.PHONY: backend_tests +backend_tests: ## Runs all backend tests + make unit_tests + make integration_tests + make backend_e2e_test + + +# app cmds +.PHONY: install_env +install_env: ## Installs frontend & backend deps + make install_frontend_env + make install_backend_env + +.PHONY: clean_env +clean_env: ## Removes frontend & backend deps + make clean_frontend_env + make clean_backend_env + +.PHONY: build +build: install_env ## Builds both frontend & backend + make build_frontend + make build_backend + +.PHONY: run_app +run_app: check_project_arg install_env build_frontend ## Builds frontend and backend then runs the app. NEEDS arg: PROJECT_NAME + cp ${FRONTEND_DIR}/dist/* ${BACKEND_DIR}/app/static/ && \ + make build_backend && \ + make run_backend PROJECT_NAME=${PROJECT_NAME} + +.PHONY: run_dev +run_dev: check_project_arg install_env ## Runs both backend and frontend in dev mode. 
NEEDS arg: PROJECT_NAME + make run_dev_backend PROJECT_NAME=${PROJECT_NAME} & + make run_dev_frontend + + + +.PHONY: init_proj +init_proj: install_backend_env ## Initiates a new project using dflowd + cd ${BACKEND_DIR} && poetry run dflowd init --destination ../../ diff --git a/backend/df_designer/app/api/api_v1/api.py b/backend/df_designer/app/api/api_v1/api.py index 7a75d793..d6e857e6 100644 --- a/backend/df_designer/app/api/api_v1/api.py +++ b/backend/df_designer/app/api/api_v1/api.py @@ -1,9 +1,10 @@ from fastapi import APIRouter -from app.api.api_v1.endpoints import bot, flows +from app.api.api_v1.endpoints import bot, dff_services, flows from app.core.config import settings api_router = APIRouter() api_router.include_router(flows.router, prefix="/".join([settings.API_V1_STR, "flows"]), tags=["flows"]) +api_router.include_router(dff_services.router, prefix="/".join([settings.API_V1_STR, "services"]), tags=["services"]) api_router.include_router(bot.router, prefix="/".join([settings.API_V1_STR, "bot"]), tags=["bot"]) diff --git a/backend/df_designer/app/api/api_v1/endpoints/bot.py b/backend/df_designer/app/api/api_v1/endpoints/bot.py index d84cb29f..b6c8ee38 100644 --- a/backend/df_designer/app/api/api_v1/endpoints/bot.py +++ b/backend/df_designer/app/api/api_v1/endpoints/bot.py @@ -1,50 +1,49 @@ import asyncio -from fastapi import APIRouter, HTTPException, Depends, WebSocket, WebSocketException, status, BackgroundTasks from typing import Optional, Union -from app.schemas.preset import Preset -from app.schemas.pagination import Pagination -from app.core.logger_config import get_logger -from app.services.process_manager import ProcessManager, BuildManager, RunManager +from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException, WebSocket, WebSocketException, status + from app.api import deps +from app.core.logger_config import get_logger +from app.schemas.pagination import Pagination +from app.schemas.preset import Preset +from app.services.process_manager import BuildManager, ProcessManager, RunManager from app.services.websocket_manager import WebSocketManager - router = APIRouter() logger = get_logger(__name__) -async def _stop_process( - id_: int, process_manager: ProcessManager, process= "run" -): - if id_ not in process_manager.processes: - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, detail="Process not found. It may have already exited." - ) +async def _stop_process(id_: int, process_manager: ProcessManager, process="run"): try: await process_manager.stop(id_) except (RuntimeError, ProcessLookupError) as e: - raise HTTPException(status_code=404, detail="Process not found. It may have already exited or not started yet. Please check logs.") from e + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Process not found. It may have already exited or not started yet. Please check logs.", + ) from e logger.info("%s process '%s' has stopped", process.capitalize(), id_) return {"status": "ok"} -def _check_process_status(id_: int, process_manager: ProcessManager) -> dict[str, str]: +async def _check_process_status(id_: int, process_manager: ProcessManager) -> dict[str, str]: if id_ not in process_manager.processes: raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, detail="Process not found. It may have already exited.", + status_code=status.HTTP_404_NOT_FOUND, + detail="Process not found. 
It may have already exited.", ) - process_status = process_manager.get_status(id_) - return {"status": process_status} + process_status = await process_manager.get_status(id_) + return {"status": process_status.value} @router.post("/build/start", status_code=201) -async def start_build(preset: Preset, background_tasks: BackgroundTasks, build_manager: BuildManager = Depends(deps.get_build_manager)): +async def start_build( + preset: Preset, background_tasks: BackgroundTasks, build_manager: BuildManager = Depends(deps.get_build_manager) +): await asyncio.sleep(preset.wait_time) - await build_manager.start(preset) - build_id = build_manager.get_last_id() + build_id = await build_manager.start(preset) background_tasks.add_task(build_manager.check_status, build_id) logger.info("Build process '%s' has started", build_id) return {"status": "ok", "build_id": build_id} @@ -57,35 +56,42 @@ async def stop_build(*, build_id: int, build_manager: BuildManager = Depends(dep @router.get("/build/status/{build_id}", status_code=200) async def check_build_status(*, build_id: int, build_manager: BuildManager = Depends(deps.get_build_manager)): - return _check_process_status(build_id, build_manager) + return await _check_process_status(build_id, build_manager) @router.get("/builds", response_model=Optional[Union[list, dict]], status_code=200) async def check_build_processes( build_id: Optional[int] = None, build_manager: BuildManager = Depends(deps.get_build_manager), - pagination: Pagination = Depends() + run_manager: RunManager = Depends(deps.get_run_manager), + pagination: Pagination = Depends(), ): if build_id is not None: - return await build_manager.get_build_info(build_id) + return await build_manager.get_build_info(build_id, run_manager) else: - return await build_manager.get_full_info(offset=pagination.offset(), limit=pagination.limit) + return await build_manager.get_full_info_with_runs_info( + run_manager, offset=pagination.offset(), limit=pagination.limit + ) + @router.get("/builds/logs/{build_id}", response_model=Optional[list], status_code=200) async def get_build_logs( - build_id: int, - build_manager: BuildManager = Depends(deps.get_build_manager), - pagination: Pagination = Depends() + build_id: int, build_manager: BuildManager = Depends(deps.get_build_manager), pagination: Pagination = Depends() ): if build_id is not None: return await build_manager.fetch_build_logs(build_id, pagination.offset(), pagination.limit) @router.post("/run/start/{build_id}", status_code=201) -async def start_run(*, build_id: int, preset: Preset, background_tasks: BackgroundTasks, run_manager: RunManager = Depends(deps.get_run_manager)): +async def start_run( + *, + build_id: int, + preset: Preset, + background_tasks: BackgroundTasks, + run_manager: RunManager = Depends(deps.get_run_manager) +): await asyncio.sleep(preset.wait_time) - await run_manager.start(build_id, preset) - run_id = run_manager.get_last_id() + run_id = await run_manager.start(build_id, preset) background_tasks.add_task(run_manager.check_status, run_id) logger.info("Run process '%s' has started", run_id) return {"status": "ok", "run_id": run_id} @@ -98,14 +104,14 @@ async def stop_run(*, run_id: int, run_manager: RunManager = Depends(deps.get_ru @router.get("/run/status/{run_id}", status_code=200) async def check_run_status(*, run_id: int, run_manager: RunManager = Depends(deps.get_run_manager)): - return _check_process_status(run_id, run_manager) + return await _check_process_status(run_id, run_manager) @router.get("/runs", 
response_model=Optional[Union[list, dict]], status_code=200) async def check_run_processes( run_id: Optional[int] = None, run_manager: RunManager = Depends(deps.get_run_manager), - pagination: Pagination = Depends() + pagination: Pagination = Depends(), ): if run_id is not None: return await run_manager.get_run_info(run_id) @@ -115,9 +121,7 @@ async def check_run_processes( @router.get("/runs/logs/{run_id}", response_model=Optional[list], status_code=200) async def get_run_logs( - run_id: int, - run_manager: RunManager = Depends(deps.get_run_manager), - pagination: Pagination = Depends() + run_id: int, run_manager: RunManager = Depends(deps.get_run_manager), pagination: Pagination = Depends() ): if run_id is not None: return await run_manager.fetch_run_logs(run_id, pagination.offset(), pagination.limit) @@ -147,12 +151,17 @@ async def connect( logger.error("process with run_id '%s' exited or never existed", run_id) raise WebSocketException(code=status.WS_1014_BAD_GATEWAY) - await websocket_manager.connect(websocket) logger.info("Websocket for run process '%s' has been opened", run_id) - output_task = asyncio.create_task(websocket_manager.send_process_output_to_websocket(run_id, run_manager, websocket)) - input_task = asyncio.create_task(websocket_manager.forward_websocket_messages_to_process(run_id, run_manager, websocket)) + await websocket.send_text("Start chatting") + + output_task = asyncio.create_task( + websocket_manager.send_process_output_to_websocket(run_id, run_manager, websocket) + ) + input_task = asyncio.create_task( + websocket_manager.forward_websocket_messages_to_process(run_id, run_manager, websocket) + ) # Wait for either task to finish _, websocket_manager.pending_tasks[websocket] = await asyncio.wait( diff --git a/backend/df_designer/app/api/api_v1/endpoints/dff_services.py b/backend/df_designer/app/api/api_v1/endpoints/dff_services.py new file mode 100644 index 00000000..904a4201 --- /dev/null +++ b/backend/df_designer/app/api/api_v1/endpoints/dff_services.py @@ -0,0 +1,21 @@ +from fastapi import APIRouter, Depends + +from app.api.deps import get_index +from app.core.logger_config import get_logger +from app.services.index import Index + +router = APIRouter() + +logger = get_logger(__name__) + + +@router.get("/search/{service_name}", status_code=200) +async def search_service(service_name: str, index: Index = Depends(get_index)): + response = await index.search_service(service_name) + return response + + +@router.get("/refresh_index", status_code=200) +async def refresh_index(index: Index = Depends(get_index)): + await index.load() + return {"status": "ok"} diff --git a/backend/df_designer/app/api/api_v1/endpoints/flows.py b/backend/df_designer/app/api/api_v1/endpoints/flows.py index 820f54e8..96eedff6 100644 --- a/backend/df_designer/app/api/api_v1/endpoints/flows.py +++ b/backend/df_designer/app/api/api_v1/endpoints/flows.py @@ -1,23 +1,52 @@ -from fastapi import APIRouter +from fastapi import APIRouter, HTTPException, status +from git import Repo from omegaconf import OmegaConf -from app.core.logger_config import get_logger from app.core.config import settings -from app.db.base import write_conf, read_conf +from app.core.logger_config import get_logger +from app.db.base import read_conf, write_conf +from app.utils.git_cmd import commit_changes, get_repo router = APIRouter() logger = get_logger(__name__) -@router.get("/") -async def flows_get(): +@router.get("/{build_id}") +async def flows_get(build_id: str): + repo = Repo.init(settings.frontend_flows_path.parent) + 
for tag in repo.tags: + if tag.name == str(build_id): + break + else: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Build_id {build_id} doesn't exist", + ) + + repo.git.checkout(build_id, settings.frontend_flows_path.name) + omega_flows = await read_conf(settings.frontend_flows_path) dict_flows = OmegaConf.to_container(omega_flows, resolve=True) return {"status": "ok", "data": dict_flows} -@router.post("/") -async def flows_post(flows: dict) -> dict[str, str]: +@router.post("/{build_id}") +async def flows_post(flows: dict, build_id: str) -> dict[str, str]: + repo = get_repo(settings.frontend_flows_path.parent) + for tag in repo.tags: + if tag.name == str(build_id): + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail=f"Build_id {build_id} already exists", + ) + await write_conf(flows, settings.frontend_flows_path) + logger.info("Flows saved to DB") + + commit_changes(repo, "Save frontend flows") + repo.create_tag(build_id) + + logger.info("Flows saved to git with tag %s", build_id) + return {"status": "ok"} diff --git a/backend/df_designer/app/api/deps.py b/backend/df_designer/app/api/deps.py index e308de30..934e5cd9 100644 --- a/backend/df_designer/app/api/deps.py +++ b/backend/df_designer/app/api/deps.py @@ -1,14 +1,30 @@ +from app.services.index import Index from app.services.process_manager import BuildManager, RunManager from app.services.websocket_manager import WebSocketManager build_manager = BuildManager() + + def get_build_manager() -> BuildManager: return build_manager + run_manager = RunManager() + + def get_run_manager() -> RunManager: return run_manager + websocket_manager = WebSocketManager() + + def get_websocket_manager() -> WebSocketManager: return websocket_manager + + +index = Index() + + +def get_index() -> Index: + return index diff --git a/backend/df_designer/app/cli.py b/backend/df_designer/app/cli.py index 1a947a26..5ba56825 100644 --- a/backend/df_designer/app/cli.py +++ b/backend/df_designer/app/cli.py @@ -1,28 +1,42 @@ import asyncio -from cookiecutter.main import cookiecutter import json import os -from pathlib import Path -import subprocess import sys +from pathlib import Path + import typer import uvicorn +from cookiecutter.main import cookiecutter +from git import Repo from app.core.config import settings from app.core.logger_config import get_logger from app.services.json_translator import translator +from app.utils.git_cmd import commit_changes cli = typer.Typer() -def _execute_command(command_to_run): +def init_new_repo(git_path: Path, tag_name: str): + repo = Repo.init(git_path) + repo.git.checkout(b="dev") + commit_changes(repo, "Init frontend flows") + repo.create_tag(tag_name) + + +async def _execute_command(command_to_run): logger = get_logger(__name__) try: - process = subprocess.run(command_to_run.split(),check=False) + process = await asyncio.create_subprocess_exec(*command_to_run.split()) # Check the return code to determine success if process.returncode == 0: logger.info("Command '%s' executed successfully.", command_to_run) + elif process.returncode is None: + logger.info("Process by command '%s' is running.", command_to_run) + await process.wait() + logger.info("Process ended with return code: %d.", process.returncode) + sys.exit(process.returncode) else: logger.error("Command '%s' failed with return code: %d", command_to_run, process.returncode) sys.exit(process.returncode) @@ -34,51 +48,57 @@ def _execute_command(command_to_run): def _execute_command_file(build_id: int, project_dir: str, 
command_file: str, preset: str): logger = get_logger(__name__) - presets_build_path = os.path.join(project_dir, "df_designer", "presets", command_file) + presets_build_path = Path(project_dir) / "df_designer" / "presets" / command_file with open(presets_build_path) as file: presets_build_file = json.load(file) if preset in presets_build_file: command_to_run = presets_build_file[preset]["cmd"] if preset == "success": - command_to_run += f" {build_id}" + command_to_run += f" {build_id} --call_from_open_event_loop True" logger.debug("Executing command for preset '%s': %s", preset, command_to_run) - _execute_command(command_to_run) + asyncio.run(_execute_command(command_to_run)) else: raise ValueError(f"Invalid preset '{preset}'. Preset must be one of {list(presets_build_file.keys())}") @cli.command("build_bot") -def build_bot( - build_id: int, - project_dir: str = settings.work_directory, - preset: str = "success" -): +def build_bot(build_id: int, project_dir: str = settings.work_directory, preset: str = "success"): _execute_command_file(build_id, project_dir, "build.json", preset) @cli.command("build_scenario") -def build_scenario(build_id: int, project_dir: str = "."): - asyncio.run(translator(build_id=build_id, project_dir=project_dir)) +def build_scenario(build_id: int, project_dir: str = ".", call_from_open_event_loop: bool = False): + if call_from_open_event_loop: + loop = asyncio.get_event_loop() + loop.create_task(translator(build_id=build_id, project_dir=project_dir)) + loop.run_until_complete(asyncio.wait([], return_when=asyncio.FIRST_COMPLETED)) + else: + asyncio.run(translator(build_id=build_id, project_dir=project_dir)) + @cli.command("run_bot") -def run_bot( - build_id: int, - project_dir: str = settings.work_directory, - preset: str = "success" -): +def run_bot(build_id: int, project_dir: str = settings.work_directory, preset: str = "success"): _execute_command_file(build_id, project_dir, "run.json", preset) @cli.command("run_scenario") -def run_scenario( - build_id: int, - project_dir: str = "." 
-): - script_path = Path(project_dir) / "bot" / "scripts" / f"build_{build_id}.yaml" +def run_scenario(build_id: int, project_dir: str = ".", call_from_open_event_loop: bool = False): + # checkout the commit and then run the build + bot_repo = Repo.init(Path(project_dir) / "bot") + bot_repo.git.checkout(build_id, "scripts/build.yaml") + + script_path = Path(project_dir) / "bot" / "scripts" / "build.yaml" + if not script_path.exists(): + raise FileNotFoundError(f"File {script_path} doesn't exist") command_to_run = f"poetry run python {project_dir}/app.py --script-path {script_path}" - _execute_command(command_to_run) + if call_from_open_event_loop: + loop = asyncio.get_event_loop() + loop.create_task(_execute_command(command_to_run)) + loop.run_until_complete(asyncio.wait([], return_when=asyncio.FIRST_COMPLETED)) + else: + asyncio.run(_execute_command(command_to_run)) async def _run_server() -> None: @@ -103,17 +123,25 @@ def run_backend( settings.host, settings.backend_port, reload=settings.conf_reload, - reload_dirs=str(settings.work_directory) + reload_dirs=str(settings.work_directory), ) settings.server = uvicorn.Server(settings.uvicorn_config) settings.server.run() @cli.command("init") -def init(destination: str = settings.work_directory): +def init(destination: str = settings.work_directory, no_input: bool = False, overwrite_if_exists: bool = True): original_dir = os.getcwd() try: os.chdir(destination) - cookiecutter("https://github.com/Ramimashkouk/df_d_template.git") + proj_path = cookiecutter( + "https://github.com/Ramimashkouk/df_d_template.git", + no_input=no_input, + overwrite_if_exists=overwrite_if_exists, + checkout="feat/versioning", + ) finally: os.chdir(original_dir) + + init_new_repo(Path(proj_path) / "bot", tag_name="43") + init_new_repo(Path(proj_path) / "df_designer", tag_name="43") diff --git a/backend/df_designer/app/core/config.py b/backend/df_designer/app/core/config.py index 8227ded3..13f3ba81 100644 --- a/backend/df_designer/app/core/config.py +++ b/backend/df_designer/app/core/config.py @@ -1,13 +1,14 @@ -from pydantic_settings import BaseSettings from pathlib import Path -from omegaconf import OmegaConf + import uvicorn +from pydantic_settings import BaseSettings + class Settings(BaseSettings): API_V1_STR: str = "/api/v1" APP: str = "app.main:app" - work_directory: str = "." 
+ work_directory: Path = Path(".") config_file_path: Path = Path(__file__).absolute() static_files: Path = config_file_path.parent.with_name("static") start_page: Path = static_files.joinpath("index.html") @@ -17,16 +18,18 @@ class Settings(BaseSettings): backend_port: int = 8000 ui_port: int = 3000 log_level: str = "debug" - conf_reload: bool = True # Enable auto-reload for development mode + conf_reload: bool = True # Enable auto-reload for development mode builds_path: Path = Path(f"{work_directory}/df_designer/builds.yaml") runs_path: Path = Path(f"{work_directory}/df_designer/runs.yaml") dir_logs: Path = Path(f"{work_directory}/df_designer/logs") - frontend_flows_path : Path = Path(f"{work_directory}/df_designer/frontend_flows.yaml") + frontend_flows_path: Path = Path(f"{work_directory}/df_designer/frontend_flows.yaml") + index_path: Path = Path(f"{work_directory}/bot/custom/.services_index.yaml") uvicorn_config: uvicorn.Config = uvicorn.Config( APP, host, backend_port, log_level=log_level, reload=conf_reload, reload_dirs=[work_directory, str(package_dir)] ) server: uvicorn.Server = uvicorn.Server(uvicorn_config) + settings = Settings() diff --git a/backend/df_designer/app/core/logger_config.py b/backend/df_designer/app/core/logger_config.py index 01c064cc..7bd734e4 100644 --- a/backend/df_designer/app/core/logger_config.py +++ b/backend/df_designer/app/core/logger_config.py @@ -1,8 +1,7 @@ -from datetime import datetime import logging +from datetime import datetime from pathlib import Path -from typing import Optional -import os +from typing import Literal, Optional from app.core.config import settings @@ -14,27 +13,30 @@ "debug": logging.DEBUG, } -def setup_logging(log_type: str, log_name: str) -> Path: #TODO: rename: setup_detailed_logging + +def setup_logging(log_type: Literal["builds", "runs"], id_: int, timestamp: datetime) -> Path: # Ensure log_type is either 'builds' or 'runs' - if log_type not in ['builds', 'runs']: + if log_type not in ["builds", "runs"]: raise ValueError("log_type must be 'builds' or 'runs'") - today_date = datetime.now().strftime("%Y%m%d") + # Get today's date separated with '_' using the timestamp + log_name = "_".join([str(id_), timestamp.strftime("%H:%M:%S")]) + today_date = timestamp.strftime("%Y%m%d") + log_directory = settings.dir_logs / log_type / today_date - os.makedirs(log_directory, exist_ok=True) + log_directory.mkdir(parents=True, exist_ok=True) log_file = log_directory / f"{log_name}.log" - if not os.path.exists(log_file): - open(log_file, 'w', encoding="UTF-8").close() + log_file.touch(exist_ok=True) return log_file + def get_logger(name, file_handler_path: Optional[Path] = None): if file_handler_path is None: - os.makedirs(settings.dir_logs, exist_ok=True) - file_handler_path = settings.dir_logs/ "logs.log" - if not os.path.exists(file_handler_path): - open(file_handler_path, 'w', encoding="UTF-8").close() + file_handler_path = settings.dir_logs / "logs.log" + file_handler_path.parent.mkdir(parents=True, exist_ok=True) + file_handler_path.touch(exist_ok=True) logger = logging.getLogger(name) logger.propagate = False @@ -45,8 +47,8 @@ def get_logger(name, file_handler_path: Optional[Path] = None): c_handler.setLevel(LOG_LEVELS[settings.log_level]) f_handler.setLevel(LOG_LEVELS[settings.log_level]) - c_format = logging.Formatter('%(name)s - %(levelname)s - %(message)s') - f_format = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') + c_format = logging.Formatter("%(name)s - %(levelname)s - %(message)s") + f_format = 
logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") c_handler.setFormatter(c_format) f_handler.setFormatter(f_format) diff --git a/backend/df_designer/app/db/base.py b/backend/df_designer/app/db/base.py index cd37631a..9328ce5c 100644 --- a/backend/df_designer/app/db/base.py +++ b/backend/df_designer/app/db/base.py @@ -1,25 +1,29 @@ from asyncio import Lock -import aiofiles from pathlib import Path -from omegaconf import OmegaConf from typing import Union +import aiofiles +from omegaconf import OmegaConf + file_lock = Lock() + async def read_conf(path: Path): async with file_lock: async with aiofiles.open(path, "r", encoding="UTF-8") as file: data = await file.read() - omega_data = OmegaConf.create(data) # read from a YAML string + omega_data = OmegaConf.create(data) # read from a YAML string return omega_data + async def write_conf(data: Union[list, dict], path: Path): yaml_conf = OmegaConf.to_yaml(data) async with file_lock: - async with aiofiles.open(path, "w", encoding="UTF-8") as file: #TODO: change to "a" for append + async with aiofiles.open(path, "w", encoding="UTF-8") as file: # TODO: change to "a" for append await file.write(yaml_conf) + async def read_logs(log_file: Path): async with aiofiles.open(log_file, "r", encoding="UTF-8") as file: - logs = [line async for line in file if line.strip()] - return logs \ No newline at end of file + logs = [line async for line in file] + return logs diff --git a/backend/df_designer/app/main.py b/backend/df_designer/app/main.py index ea92f2be..d05b93b0 100644 --- a/backend/df_designer/app/main.py +++ b/backend/df_designer/app/main.py @@ -1,36 +1,52 @@ -from fastapi import FastAPI, APIRouter, Response +from contextlib import asynccontextmanager + +from fastapi import APIRouter, FastAPI, Response from fastapi.middleware.cors import CORSMiddleware -from fastapi.responses import HTMLResponse, FileResponse, RedirectResponse +from fastapi.responses import FileResponse, HTMLResponse, RedirectResponse from app.api.api_v1.api import api_router +from app.api.deps import get_index from app.core.config import settings -app = FastAPI(title="DF Designer") +index_dict = {} + + +@asynccontextmanager +async def lifespan(app: FastAPI): + index_dict["instance"] = get_index() + await index_dict["instance"].load() + yield + # Clean up and release the resources + + +app = FastAPI(title="DF Designer", lifespan=lifespan) app.add_middleware( - CORSMiddleware, - allow_origins=["*"], - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], ) root_router = APIRouter() + @root_router.get("/app/{path:path}") async def route_static_file(path: str): - if not settings.start_page.exists(): - return HTMLResponse(content="frontend is not built") - file_path = settings.static_files / path.split("/")[-1] - if file_path.suffix in (".js", ".css", ".html"): - return FileResponse(file_path) - return FileResponse(settings.static_files / "index.html") + if not settings.start_page.exists(): + return HTMLResponse(content="frontend is not built") + file_path = settings.static_files / path.split("/")[-1] + if file_path.suffix in (".js", ".css", ".html"): + return FileResponse(file_path) + return FileResponse(settings.static_files / "index.html") @root_router.get("/") async def root() -> Response: - """Redirect '/' to index.html""" - return RedirectResponse(url="/app") + """Redirect '/' to index.html""" + return RedirectResponse(url="/app") + 
app.include_router(root_router) app.include_router(api_router) diff --git a/backend/df_designer/app/schemas/pagination.py b/backend/df_designer/app/schemas/pagination.py index e31bc241..bb089274 100644 --- a/backend/df_designer/app/schemas/pagination.py +++ b/backend/df_designer/app/schemas/pagination.py @@ -1,5 +1,6 @@ -from pydantic import BaseModel from fastapi import Query +from pydantic import BaseModel + class Pagination(BaseModel): page: int = Query(1, gt=0) diff --git a/backend/df_designer/app/schemas/preset.py b/backend/df_designer/app/schemas/preset.py index 2f6b9cae..0efb5b66 100644 --- a/backend/df_designer/app/schemas/preset.py +++ b/backend/df_designer/app/schemas/preset.py @@ -1,6 +1,8 @@ -from pydantic import BaseModel from typing import Literal +from pydantic import BaseModel + + class Preset(BaseModel): wait_time: float end_status: Literal["success", "failure", "loop"] diff --git a/backend/df_designer/app/schemas/process_status.py b/backend/df_designer/app/schemas/process_status.py new file mode 100644 index 00000000..5a293dca --- /dev/null +++ b/backend/df_designer/app/schemas/process_status.py @@ -0,0 +1,11 @@ +from enum import Enum + + +class Status(Enum): + NULL = "null" + STOPPED = "stopped" + COMPLETED = "completed" + FAILED = "failed" + RUNNING = "running" + ALIVE = "alive" + FAILED_WITH_UNEXPECTED_CODE = "failed with unexpected returncode" diff --git a/backend/df_designer/app/services/index.py b/backend/df_designer/app/services/index.py new file mode 100644 index 00000000..6b36ef00 --- /dev/null +++ b/backend/df_designer/app/services/index.py @@ -0,0 +1,102 @@ +import asyncio +from typing import List + +from omegaconf import OmegaConf + +from app.core.config import settings +from app.core.logger_config import get_logger +from app.db.base import read_conf, read_logs, write_conf + + +class Index: + def __init__(self): + self.path = settings.index_path + self.index = {} + self.conditions = [] + self.responses = [] + self.services = [] + self.logger = get_logger(__name__) + + async def _load_index(self): + db_index = await read_conf(self.path) + index_dict = OmegaConf.to_container(db_index, resolve=True) + self.index = index_dict + self.logger.debug("Index loaded") + + async def _load_conditions(self): + if (path := self.path.parent / "conditions.py").exists(): + self.conditions = await read_logs(path) + self.logger.debug("Conditions loaded") + else: + self.logger.warning("No conditions file found") + + async def _load_responses(self): + if (path := self.path.parent / "responses.py").exists(): + self.responses = await read_logs(path) + self.logger.debug("Responses loaded") + else: + self.logger.warning("No responses file found") + + async def _load_services(self): + if (path := self.path.parent / "services.py").exists(): + self.services = await read_logs(path) + self.logger.debug("Services loaded") + else: + self.logger.warning("No services file found") + + def _get_service(self, services_lst: list, lineno: int): + service = [] + func_lines = services_lst[lineno - 1 :] + self.logger.debug("services_lst: %s", services_lst) + for func_lineno, func_line in enumerate(func_lines): + if func_line[:4] == "def " and func_lineno != 0: + break + service.append(func_line) # ?maybe with \n + return service + + async def load(self): + """load index and services into memory""" + if not self.path.exists(): + raise FileNotFoundError(f"File {self.path} doesn't exist") + + await asyncio.gather( + self._load_index(), + self._load_conditions(), + self._load_responses(), + 
self._load_services(),
+        )
+        self.logger.info("Index and services loaded")
+        self.logger.debug("Loaded index: %s", self.index)
+
+    def get_services(self):
+        return self.index
+
+    async def search_service(self, service_name):
+        if service_name not in self.index:
+            return []
+        type_ = self.index[service_name]["type"]
+        lineno = int(self.index[service_name]["lineno"])
+
+        if type_ == "condition":
+            return self._get_service(self.conditions, lineno)
+        elif type_ == "response":
+            return self._get_service(self.responses, lineno)
+        elif type_ == "service":
+            return self._get_service(self.services, lineno)
+
+    async def indexit(self, service_name: str, type_, lineno):
+        self.logger.debug("Indexing '%s'", service_name)
+        await self.indexit_all([service_name], [type_], [lineno])
+        self.logger.info("Indexed '%s'", service_name)
+
+    async def indexit_all(self, services_names: List[str], types: List[str], linenos: List[int]):
+        if not self.path.exists():
+            raise FileNotFoundError(f"File {self.path} doesn't exist")
+
+        for service_name, type_, lineno in zip(services_names, types, linenos):
+            self.index[service_name] = {
+                "type": type_,  # condition/response/service
+                "lineno": lineno,
+            }
+
+        await write_conf(self.index, self.path)  # ?to background tasks
diff --git a/backend/df_designer/app/services/json_translator.py b/backend/df_designer/app/services/json_translator.py
index 4c71b0ca..0a8c026e 100644
--- a/backend/df_designer/app/services/json_translator.py
+++ b/backend/df_designer/app/services/json_translator.py
@@ -1,63 +1,209 @@
 from pathlib import Path
+from typing import Tuple
 
-from app.db.base import read_conf, write_conf
+from git import Repo
+
+from app.api.deps import get_index
+from app.core.config import settings
 from app.core.logger_config import get_logger
+from app.db.base import read_conf, write_conf
+from app.utils.git_cmd import commit_changes, get_repo
 
 logger = get_logger(__name__)
 
 
-async def translator(build_id: int, project_dir: str):
-    frontend_graph_path = Path(project_dir) / "df_designer" / "frontend_flows.yaml"
-    script_file = Path(project_dir) / "bot" / "scripts" / f"build_{build_id}.yaml"
-    custom_dir = "custom"
-    custom_dir_path = "bot" / Path(custom_dir)
-    custom_dir_path.mkdir(exist_ok=True, parents=True)
-    custom_conditions_file = custom_dir_path / "conditions.py"
+def get_db_paths(project_dir: Path, custom_dir: str) -> Tuple[Path, Path, Path]:
+    frontend_graph_path = project_dir / "df_designer" / "frontend_flows.yaml"
+    custom_conditions_file = project_dir / "bot" / custom_dir / "conditions.py"
+    script_path = project_dir / "bot" / "scripts" / "build.yaml"
 
-    script = {
-        "CONFIG": {"custom_dir": "/".join(["..", custom_dir])},
-    }
-    flow_graph = await read_conf(frontend_graph_path)
+    if not frontend_graph_path.exists():
+        raise FileNotFoundError(f"File {frontend_graph_path} doesn't exist")
+    if not custom_conditions_file.exists():
+        raise FileNotFoundError(f"File {custom_conditions_file} doesn't exist")
+    if not script_path.exists():
+        script_path.parent.mkdir(parents=True, exist_ok=True)
+        script_path.touch()
+    return frontend_graph_path, script_path, custom_conditions_file
+
+
+def organize_graph_according_to_nodes(flow_graph, script):
     nodes = {}
     for flow in flow_graph["flows"]:
         for node in flow.data.nodes:
-            if node.type == "start_node":
+            if "flags" in node.data and "start" in node.data.flags:
+                if "start_label" in script["CONFIG"]:
+                    raise ValueError("There is more than one start node in the flow")
                 script["CONFIG"]["start_label"] = [flow.name, node.data.name]
nodes[node.id] = {"info": node} nodes[node.id]["flow"] = flow.name nodes[node.id]["TRANSITIONS"] = [] - for flow in flow_graph["flows"]: - for edge in flow.data.edges: - if edge.source in nodes and edge.target in nodes: - condition = next(condition for condition in nodes[edge.source]["info"].data.conditions if condition["id"] == edge.sourceHandle) - - custom_conditions = custom_conditions_file.read_text() - custom_conditions_names = [fun.split("(")[0].strip() for fun in custom_conditions.split("def ")[1:]] - if condition.name not in custom_conditions_names: - with open(custom_conditions_file, "a", encoding="UTF-8") as f: - f.write(condition.data.action + "\n") - logger.debug("Writing to %s: %s", custom_conditions_file, condition.name) - - nodes[edge.source]["TRANSITIONS"].append( - { - "lbl": [ - nodes[edge.target]['flow'], - nodes[edge.target]['info'].data.name, - condition.data.priority - ], - "cnd": f"custom_dir.conditions.{condition.name}" - } - ) + return nodes + + +def get_condition(nodes, edge): + try: + return next( + condition + for condition in nodes[edge.source]["info"].data.conditions + if condition["id"] == edge.sourceHandle + ) + except StopIteration: + return None + +def write_conditions_to_file(conditions_lines, custom_conditions_file): + # TODO: make reading and writing conditions async + with open(custom_conditions_file, "w", encoding="UTF-8") as file: + for line in conditions_lines: + if line[-1:] != "\n": + line = "".join([line, "\n"]) + file.write(line) + + +def add_transitions(nodes, edge, condition): + nodes[edge.source]["TRANSITIONS"].append( + { + "lbl": [ + nodes[edge.target]["flow"], + nodes[edge.target]["info"].data.name, + condition.data.priority, + ], + "cnd": f"custom_dir.conditions.{condition.name}", + } + ) + + +def fill_nodes_into_script(nodes, script): for _, node in nodes.items(): if node["flow"] not in script: script[node["flow"]] = {} - script[node["flow"]].update({ - node["info"].data.name: { - "RESPONSE": {"dff.Message": {"text": node["info"].data.response}}, - "TRANSITIONS": node["TRANSITIONS"], - }, - }) - - await write_conf(script, script_file) + script[node["flow"]].update( + { + node["info"].data.name: { + "RESPONSE": {"dff.Message": {"text": node["info"].data.response}}, + "TRANSITIONS": node["TRANSITIONS"], + } + } + ) + + +def append_condition(condition, conditions_lines): + condition = "".join([condition.data.python.action + "\n\n"]) + logger.debug(f"Condition to append: {condition}") + logger.debug(f"conditions_lines before appending: {conditions_lines}") + all_lines = conditions_lines + condition.split("\n") + return all_lines + + +async def _shift_cnds_in_index(index, cnd_strt_lineno, diff_in_lines): + services = index.get_services() + for _, service in services.items(): + if service["type"] == "condition": + if service["lineno"] - 1 > cnd_strt_lineno: # -1 is here to convert from file numeration to list numeration + service["lineno"] += diff_in_lines + + await index.indexit_all( + [service_name for service_name, _ in services.items()], + [service["type"] for _, service in services.items()], + [service["lineno"] for _, service in services.items()], + ) + + +async def replace_condition(condition, conditions_lines, cnd_strt_lineno, index): + cnd_strt_lineno = cnd_strt_lineno - 1 # conversion from file numeration to list numeration + all_lines = conditions_lines.copy() + condition = "".join([condition.data.python.action + "\n\n"]) + new_cnd_lines = condition.split("\n") + + old_cnd_lines_num = 0 + for lineno, line in 
enumerate(all_lines[cnd_strt_lineno:]): + if line[:4] == "def " and lineno != 0: + break + old_cnd_lines_num += 1 + + next_func_location = cnd_strt_lineno + old_cnd_lines_num + + logger.debug("new_cnd_lines\n") + logger.debug(new_cnd_lines) + all_lines = all_lines[:cnd_strt_lineno] + new_cnd_lines + all_lines[next_func_location:] + + diff_in_lines = len(new_cnd_lines) - old_cnd_lines_num + logger.debug("diff_in_lines: %s", diff_in_lines) + logger.debug("cnd_strt_lineno: %s", cnd_strt_lineno) + + await _shift_cnds_in_index(index, cnd_strt_lineno, diff_in_lines) + return all_lines + + +def save_version_to_git(build_id: int, bot_repo: Repo): + # Save current version of frontend script + df_designer_repo = get_repo(settings.frontend_flows_path.parent) + commit_changes(df_designer_repo, f"Save script: {build_id}") + df_designer_repo.create_tag(build_id) + logger.info("Flows saved to git with tag %s", build_id) + + # Save built version of dff bot + commit_changes(bot_repo, f"create build: {build_id}") + bot_repo.create_tag(build_id) + logger.info("Bot saved to git with tag %s", build_id) + + +async def translator(build_id: int, project_dir: str, custom_dir: str = "custom"): + bot_repo = get_repo(Path(project_dir) / "bot") + # check that there's no already existing tag {build_id} + for tag in bot_repo.tags: + if tag.name == str(build_id): + raise ValueError(f"Tag {build_id} already exists") + + index = get_index() + await index.load() + index.logger.debug("Loaded index '%s'", index.index) + + frontend_graph_path, script_path, custom_conditions_file = get_db_paths(Path(project_dir), custom_dir) + + script = { + "CONFIG": {"custom_dir": "/".join(["..", custom_dir])}, + } + flow_graph = await read_conf(frontend_graph_path) + + nodes = organize_graph_according_to_nodes(flow_graph, script) + + with open(custom_conditions_file, "r", encoding="UTF-8") as file: + conditions_lines = file.readlines() + + for flow in flow_graph["flows"]: + for edge in flow.data.edges: + if edge.source in nodes and edge.target in nodes: + condition = get_condition(nodes, edge) + if condition is None: + logger.error( + "A condition of edge '%s' - '%s' and id of '%s' is not found in the corresponding node", + edge.source, + edge.target, + edge.sourceHandle, + ) + continue + + if condition.name not in (cnd_names := index.index): + logger.debug("Adding condition: %s", condition.name) + cnd_lineno = len(conditions_lines) + conditions_lines = append_condition(condition, conditions_lines) + await index.indexit(condition.name, "condition", cnd_lineno + 1) + else: + logger.debug("Replacing condition: %s", condition.name) + conditions_lines = await replace_condition( + condition, conditions_lines, cnd_names[condition.name]["lineno"], index + ) + + add_transitions(nodes, edge, condition) + else: + logger.error("A node of edge '%s-%s' is not found in nodes", edge.source, edge.target) + + fill_nodes_into_script(nodes, script) + + write_conditions_to_file(conditions_lines, custom_conditions_file) + await write_conf(script, script_path) + + save_version_to_git(build_id, bot_repo) diff --git a/backend/df_designer/app/services/process.py b/backend/df_designer/app/services/process.py index 3b84fd8d..e5c2aa10 100644 --- a/backend/df_designer/app/services/process.py +++ b/backend/df_designer/app/services/process.py @@ -1,14 +1,14 @@ -import aiofiles import asyncio -from datetime import datetime import logging +from abc import ABC, abstractmethod +from datetime import datetime from pathlib import Path -from typing import List -from omegaconf 
import OmegaConf +from typing import List, Optional -from app.core.logger_config import get_logger, setup_logging from app.core.config import settings -from app.db.base import write_conf, read_conf +from app.core.logger_config import get_logger, setup_logging +from app.db.base import read_conf, write_conf +from app.schemas.process_status import Status def _map_to_str(params: dict): @@ -19,72 +19,88 @@ def _map_to_str(params: dict): params[k] = str(v) -class Process: - def __init__(self, id_: int, preset_end_status = ""): +class Process(ABC): + def __init__(self, id_: int, preset_end_status=""): self.id: int = id_ self.preset_end_status: str = preset_end_status - self.status: str = "null" + self.status: Status = Status.NULL self.timestamp: datetime = datetime.now() self.log_path: Path - self.process: asyncio.subprocess.Process # pylint: disable=no-member #TODO: is naming ok? + self.lock = asyncio.Lock() + self.process: asyncio.subprocess.Process # pylint: disable=no-member #TODO: is naming ok? self.logger: logging.Logger async def start(self, cmd_to_run): - async with aiofiles.open(self.log_path, "a", encoding="UTF-8") as file: #TODO: log to files - self.process = await asyncio.create_subprocess_exec( - *cmd_to_run.split(), - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE, - stdin=asyncio.subprocess.PIPE, - ) - - def get_full_info(self) -> dict: - self.check_status() - return { - key: getattr(self, key) for key in self.__dict__ if key not in ["process", "logger"] - } - - def set_full_info(self, params_dict): - for key, value in params_dict.items(): - setattr(self, key, value) - + self.process = await asyncio.create_subprocess_exec( + *cmd_to_run.split(), + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + stdin=asyncio.subprocess.PIPE, + ) + + async def get_full_info(self, attributes: list) -> dict: + await self.check_status() + info = {key: getattr(self, key) for key in self.__dict__ if key in attributes} + if "status" in attributes: + info["status"] = self.status.value + + return info + + @abstractmethod async def update_db_info(self): - pass + raise NotImplementedError async def periodically_check_status(self): while True: - await self.update_db_info() # check status and update db - self.logger.info("Status of process '%s': %s",self.id, self.status) - if self.status in ["stopped", "completed", "failed"]: + await self.update_db_info() # check status and update db + self.logger.info("Status of process '%s': %s", self.id, self.status) + if self.status in [Status.STOPPED, Status.COMPLETED, Status.FAILED]: break - await asyncio.sleep(2) #TODO: ?sleep time shouldn't be constant - - def check_status(self) -> str: - """Returns the process status [null, running, completed, failed, stopped]. - - null: When a process is initiated but not started yet. This condition is unusual and typically indicates - incorrect usage or a process misuse in backend logic. - - running: returncode is None - - completed: returncode is 0 - - failed: returncode is 1 - - stopped: returncode is -15 - - "Exited with return code: {self.process.returncode}. A non-zero return code indicates an error": Otherwise + await asyncio.sleep(2) # TODO: ?sleep time shouldn't be constant + + async def check_status(self) -> Status: + """Returns the process status. + - Status.NULL: When a process is initiated but not started yet. This condition is unusual and typically + indicates incorrect usage or a process misuse in backend logic. 
+ - Status.ALIVE: process is alive and ready to communicate + - Status.RUNNING: process is still starting up; communication is not yet possible + - Status.COMPLETED: returncode is 0 + - Status.FAILED: returncode is 1 + - Status.STOPPED: returncode is -15 + - Status.FAILED_WITH_UNEXPECTED_CODE: failed with any other returncode """ if self.process is None: - self.status = "null" + self.status = Status.NULL + # if process is already alive, don't interrupt potential open channels by checking status periodically. elif self.process.returncode is None: - self.status = "running" + if self.status == Status.ALIVE: + self.status = Status.ALIVE + else: + if await self.is_alive(): + self.status = Status.ALIVE + else: + self.status = Status.RUNNING + elif self.process.returncode == 0: - self.status = "completed" + self.status = Status.COMPLETED elif self.process.returncode == 1: - self.status = "failed" + self.status = Status.FAILED elif self.process.returncode == -15: - self.status = "stopped" + self.status = Status.STOPPED else: - self.logger.warning( + self.logger.error( "Unexpected code was returned: '%s'. A non-zero return code indicates an error.", - self.process.returncode + self.process.returncode, ) - return str(self.process.returncode) + self.status = Status.FAILED_WITH_UNEXPECTED_CODE + + if self.status not in [Status.NULL, Status.RUNNING, Status.ALIVE, Status.STOPPED]: + stdout, stderr = await self.process.communicate() + if stdout: + self.logger.info(f"[stdout]\n{stdout.decode()}") + if stderr: + self.logger.error(f"[stderr]\n{stderr.decode()}") + return self.status async def stop(self): @@ -95,22 +111,41 @@ async def stop(self): self.logger.debug("Terminating process '%s'", self.id) self.process.terminate() await self.process.wait() + self.logger.debug("Process returncode: '%s'", self.process.returncode) + except ProcessLookupError as exc: self.logger.error("Process '%s' not found. It may have already exited.", self.id) raise ProcessLookupError from exc - def read_stdout(self): - if self.process is None: - self.logger.error("Cannot read stdout from a process '%s' that has not started yet.", self.id) - raise RuntimeError + async def read_stdout(self): + async with self.lock: + if self.process is None: + self.logger.error("Cannot read stdout from a process '%s' that has not started yet.", self.id) + raise RuntimeError - return self.process.stdout.readline() + return await self.process.stdout.readline() - def write_stdin(self, message): + async def write_stdin(self, message): if self.process is None: self.logger.error("Cannot write into stdin of a process '%s' that has not started yet.", self.id) raise RuntimeError self.process.stdin.write(message) + await self.process.stdin.drain() + + async def is_alive(self) -> bool: + timeout = 0.5 + message = b"Hi\n" + try: + # Attempt to write and read from the process with a timeout.
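+            # Descriptive note: this is a write-then-read handshake. A short
+            # probe line is pushed into the child's stdin, then we wait up to
+            # `timeout` seconds for any line on stdout. A reply means the bot's
+            # chat loop is up (Status.ALIVE); silence means the process exists
+            # but is still starting (Status.RUNNING). read_stdout() holds
+            # self.lock, so the probe cannot interleave with the websocket
+            # reader that streams the same stdout.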
+ await self.write_stdin(message) + output = await asyncio.wait_for(self.read_stdout(), timeout=timeout) + if not output: + return False + self.logger.debug("Process output after communication: %s", output.decode()) + return True + except asyncio.exceptions.TimeoutError: + self.logger.debug("Process did not accept input within the timeout period.") + return False class RunProcess(Process): @@ -118,15 +153,20 @@ def __init__(self, id_: int, build_id: int = None, preset_end_status: str = ""): super().__init__(id_, preset_end_status) self.build_id: int = build_id - log_name: str = "_".join([str(id_), datetime.now().time().strftime("%H%M%S")]) - self.log_path: Path = setup_logging("runs", log_name) + self.log_path: Path = setup_logging("runs", self.id, self.timestamp) self.logger = get_logger(str(id_), self.log_path) + async def get_full_info(self, attributes: Optional[list] = None) -> dict: + if attributes is None: + attributes = ["id", "preset_end_status", "status", "timestamp", "log_path", "build_id"] + return await super().get_full_info(attributes) + async def update_db_info(self): # save current run info into runs_path + self.logger.info("Updating db run info") runs_conf = await read_conf(settings.runs_path) - run_params = self.get_full_info() + run_params = await self.get_full_info() _map_to_str(run_params) for run in runs_conf: @@ -139,34 +179,35 @@ async def update_db_info(self): await write_conf(runs_conf, settings.runs_path) - # save current run info into the correspoinding build in builds_path + # save current run id into the corresponding build in builds_path builds_conf = await read_conf(settings.builds_path) for build in builds_conf: if build.id == run_params["build_id"]: - for run in build.runs: - if run.id == run_params["id"]: - for key, value in run_params.items(): - setattr(run, key, value) - break - else: - build.runs.append(run_params) + if run_params["id"] not in build.run_ids: + build.run_ids.append(run_params["id"]) + break + await write_conf(builds_conf, settings.builds_path) class BuildProcess(Process): def __init__(self, id_: int, preset_end_status: str = ""): super().__init__(id_, preset_end_status) - self.runs: List[int] = [] + self.run_ids: List[int] = [] - log_name: str = "_".join([str(id_), datetime.now().time().strftime("%H%M%S")]) - self.log_path: Path = setup_logging("builds", log_name) + self.log_path: Path = setup_logging("builds", self.id, self.timestamp) self.logger = get_logger(str(id_), self.log_path) + async def get_full_info(self, attributes: Optional[list] = None) -> dict: + if attributes is None: + attributes = ["id", "preset_end_status", "status", "timestamp", "log_path", "run_ids"] + return await super().get_full_info(attributes) + async def update_db_info(self): # save current build info into builds_path builds_conf = await read_conf(settings.builds_path) - build_params = self.get_full_info() + build_params = await self.get_full_info() _map_to_str(build_params) for build in builds_conf: diff --git a/backend/df_designer/app/services/process_manager.py b/backend/df_designer/app/services/process_manager.py index a6c92268..d876f820 100644 --- a/backend/df_designer/app/services/process_manager.py +++ b/backend/df_designer/app/services/process_manager.py @@ -1,12 +1,13 @@ from pathlib import Path -from typing import List, Type, Optional +from typing import List + from omegaconf import OmegaConf -from app.core.logger_config import get_logger -from app.services.process import BuildProcess, RunProcess -from app.schemas.preset import Preset from
app.core.config import settings +from app.core.logger_config import get_logger from app.db.base import read_conf, read_logs +from app.schemas.preset import Preset +from app.services.process import BuildProcess, RunProcess logger = get_logger(__name__) @@ -14,32 +15,35 @@ class ProcessManager: def __init__(self): self.processes = {} + self.last_id: int def get_last_id(self): - """Get the process_id of the last started process""" - return list(self.processes.keys())[-1] + return self.last_id async def stop(self, id_): + if id_ not in self.processes: + logger.error("Process with id '%s' not found in recent running processes", id_) + raise ProcessLookupError try: await self.processes[id_].stop() - except (RuntimeError, ProcessLookupError) as exc: - raise exc + except (RuntimeError, ProcessLookupError): + raise async def check_status(self, id_): await self.processes[id_].periodically_check_status() - def get_status(self, id_): - return self.processes[id_].check_status() + async def get_status(self, id_): + return await self.processes[id_].check_status() async def get_process_info(self, id_: int, path: Path): db_conf = await read_conf(path) conf_dict = OmegaConf.to_container(db_conf, resolve=True) - return next((db_process for db_process in conf_dict if db_process["id"]==id_), None) + return next((db_process for db_process in conf_dict if db_process["id"] == id_), None) async def get_full_info(self, offset: int, limit: int, path: Path) -> List[dict]: db_conf = await read_conf(path) conf_dict = OmegaConf.to_container(db_conf, resolve=True) - return conf_dict[offset:offset+limit] + return conf_dict[offset : offset + limit] async def fetch_process_logs(self, id_: int, offset: int, limit: int, path: Path): process_info = await self.get_process_info(id_, path) @@ -50,26 +54,23 @@ async def fetch_process_logs(self, id_: int, offset: int, limit: int, path: Path log_file = process_info["log_path"] try: logs = await read_logs(log_file) + logs = [log for log in logs if log.strip()] except FileNotFoundError: logger.error("Log file '%s' not found", log_file) return None if offset > len(logs): logger.info("Offset '%s' is out of bounds ('%s' logs found)", offset, len(logs)) - return None + return None # TODO: raise error! logger.info("Returning %s logs", len(logs)) - return logs[offset:offset+limit] - + return logs[offset : offset + limit] class RunManager(ProcessManager): def __init__(self): super().__init__() - def get_last_id(self): - return self.last_id - async def start(self, build_id: int, preset: Preset): cmd_to_run = f"dflowd run_bot {build_id} --preset {preset.end_status}" self.last_id = max([run["id"] for run in await self.get_full_info(0, 10000)]) @@ -77,17 +78,10 @@ async def start(self, build_id: int, preset: Preset): id_ = self.last_id process = RunProcess(id_, build_id, preset.end_status) await process.start(cmd_to_run) + process.logger.debug("Started process. 
status: '%s'", process.process.returncode) self.processes[id_] = process - async def get_min_info(self) -> List[dict]: - runs_conf = await read_conf(settings.runs_path) - minimum_params = ["id", "build_id", "preset_end_status", "status", "timestamp"] - - minimum_info = [] - for run in runs_conf: - minimum_info.append({param: getattr(run, param) for param in minimum_params}) - - return minimum_info + return self.last_id async def get_run_info(self, id_: int): return await super().get_process_info(id_, settings.runs_path) @@ -98,15 +92,12 @@ async def get_full_info(self, offset: int, limit: int, path: Path = settings.run async def fetch_run_logs(self, run_id: int, offset: int, limit: int): return await self.fetch_process_logs(run_id, offset, limit, settings.runs_path) + class BuildManager(ProcessManager): def __init__(self): super().__init__() - def get_last_id(self): - return self.last_id - async def start(self, preset: Preset): - cmd_to_run = f"dflowd build_bot --preset {preset.end_status}" self.last_id = max([build["id"] for build in await self.get_full_info(0, 10000)]) self.last_id += 1 id_ = self.last_id @@ -115,26 +106,27 @@ async def start(self, preset: Preset): await process.start(cmd_to_run) self.processes[id_] = process - async def get_min_info(self) -> List[dict]: - builds_conf = await read_conf(settings.builds_path) - minimum_params = ["id", "preset_end_status", "status", "timestamp", "runs"] - - minimum_info = [] - for build in builds_conf: - info = {} - for param in minimum_params: - if param != "runs": - info.update({param: getattr(build, param)}) - else: - info.update({"run_ids": [run.id for run in build.runs]}) - minimum_info.append(info) - return minimum_info + return self.last_id - async def get_build_info(self, id_: int): - return await super().get_process_info(id_, settings.builds_path) + async def get_build_info(self, id_: int, run_manager): + builds_info = await self.get_full_info_with_runs_info(run_manager, offset=0, limit=10**5) + return next((build for build in builds_info if build["id"] == id_), None) async def get_full_info(self, offset: int, limit: int, path: Path = settings.builds_path): return await super().get_full_info(offset, limit, path) + async def get_full_info_with_runs_info(self, run_manager, offset: int, limit: int): + builds_info = await self.get_full_info(offset=offset, limit=limit) + runs_info = await run_manager.get_full_info(offset=0, limit=10**5) + for build in builds_info: + del build["run_ids"] + build["runs"] = [] + for run in runs_info: + if build["id"] == run["build_id"]: + run_without_build_id = {k: v for k, v in run.items() if k != "build_id"} + build["runs"].append(run_without_build_id) + + return builds_info + async def fetch_build_logs(self, build_id: int, offset: int, limit: int): return await self.fetch_process_logs(build_id, offset, limit, settings.builds_path) diff --git a/backend/df_designer/app/services/websocket_manager.py b/backend/df_designer/app/services/websocket_manager.py index 75de6327..ab0a03ad 100644 --- a/backend/df_designer/app/services/websocket_manager.py +++ b/backend/df_designer/app/services/websocket_manager.py @@ -1,24 +1,26 @@ import asyncio from asyncio.tasks import Task +from typing import Dict, Set + from fastapi import WebSocket, WebSocketDisconnect -from typing import Optional, Set, Dict from app.core.logger_config import get_logger from app.services.process_manager import ProcessManager logger = get_logger(__name__) + class WebSocketManager: def __init__(self): - self.pending_tasks : Dict[WebSocket, 
Set[Task]] = dict() + self.pending_tasks: Dict[WebSocket, Set[Task]] = dict() self.active_connections: list[WebSocket] = [] - async def connect(self, websocket: WebSocket): await websocket.accept() self.active_connections.append(websocket) def disconnect(self, websocket: WebSocket): + # TODO: await websocket.close() if websocket in self.pending_tasks: logger.info("Cancelling pending tasks") for task in self.pending_tasks[websocket]: @@ -28,9 +30,11 @@ def disconnect(self, websocket: WebSocket): def check_status(self, websocket: WebSocket): if websocket in self.active_connections: - return websocket ## return Status! + return websocket # return Status! - async def send_process_output_to_websocket(self, run_id: int, process_manager: ProcessManager, websocket: WebSocket): + async def send_process_output_to_websocket( + self, run_id: int, process_manager: ProcessManager, websocket: WebSocket + ): """Read and forward process output to the websocket client.""" try: while True: @@ -40,18 +44,22 @@ async def send_process_output_to_websocket(self, run_id: int, process_manager: P await websocket.send_text(response.decode().strip()) except WebSocketDisconnect: logger.info("Websocket connection is closed by client") - except RuntimeError as exc: - raise exc + except RuntimeError: + raise - async def forward_websocket_messages_to_process(self, run_id: int, process_manager: ProcessManager, websocket: WebSocket): + async def forward_websocket_messages_to_process( + self, run_id: int, process_manager: ProcessManager, websocket: WebSocket + ): """Listen for messages from the websocket and send them to the subprocess.""" try: while True: user_message = await websocket.receive_text() - process_manager.processes[run_id].write_stdin(user_message.encode() + b'\n') + if not user_message: + break + await process_manager.processes[run_id].write_stdin(user_message.encode() + b"\n") except asyncio.CancelledError: logger.info("Websocket connection is closed") except WebSocketDisconnect: logger.info("Websocket connection is closed by client") - except RuntimeError as exc: - raise exc + except RuntimeError: + raise diff --git a/backend/df_designer/app/tests/confest.py b/backend/df_designer/app/tests/__init__.py similarity index 100% rename from backend/df_designer/app/tests/confest.py rename to backend/df_designer/app/tests/__init__.py diff --git a/backend/df_designer/app/tests/api/test_bot.py b/backend/df_designer/app/tests/api/test_bot.py index e69de29b..cc49de0a 100644 --- a/backend/df_designer/app/tests/api/test_bot.py +++ b/backend/df_designer/app/tests/api/test_bot.py @@ -0,0 +1,172 @@ +import pytest +from fastapi import BackgroundTasks, HTTPException, WebSocket + +from app.api.api_v1.endpoints.bot import ( + _check_process_status, + _stop_process, + check_build_processes, + check_run_processes, + connect, + get_build_logs, + get_run_logs, + start_build, + start_run, +) +from app.schemas.process_status import Status +from app.services.process import RunProcess +from app.services.process_manager import BuildManager, RunManager +from app.services.websocket_manager import WebSocketManager + +PROCESS_ID = 0 +RUN_ID = 42 +BUILD_ID = 43 + + +@pytest.mark.parametrize("process_type, process_manager", [("build", BuildManager), ("run", RunManager)]) +@pytest.mark.asyncio +async def test_stop_process_success(mocker, process_type, process_manager): + mock_stop = mocker.AsyncMock() + mocker.patch.object(process_manager, "stop", mock_stop) + + # Call the function under test + await _stop_process(PROCESS_ID, process_manager(), 
process_type) + + # Assert the stop method was called once with the correct id + mock_stop.assert_awaited_once_with(PROCESS_ID) + + +# TODO: take into consideration the errors when process type is build +@pytest.mark.parametrize("error_type", [RuntimeError, ProcessLookupError]) +@pytest.mark.asyncio +async def test_stop_process_error(mocker, error_type): + mock_stop = mocker.AsyncMock(side_effect=error_type) + mocker.patch.object(RunManager, "stop", mock_stop) + + process_type = "run" + + with pytest.raises(HTTPException) as exc_info: + await _stop_process(PROCESS_ID, RunManager(), process_type) + + # Assert the stop method was called once with the correct id + assert exc_info.value.status_code == 404 + mock_stop.assert_awaited_once_with(PROCESS_ID) + + +# TODO: check the errors +@pytest.mark.asyncio +async def test_check_process_status(mocker): + mocked_process_manager = mocker.MagicMock() + mocker.patch.object(mocked_process_manager, "processes", {PROCESS_ID: mocker.MagicMock()}) + mocker.patch.object(mocked_process_manager, "get_status", mocker.AsyncMock(return_value=Status.ALIVE)) + + response = await _check_process_status(PROCESS_ID, mocked_process_manager) + + assert response == {"status": "alive"} + mocked_process_manager.get_status.assert_awaited_once_with(0) + + +@pytest.mark.asyncio +async def test_start_build(mocker): + build_manager = mocker.MagicMock() + preset = mocker.MagicMock() + + start = mocker.AsyncMock(return_value=BUILD_ID) + mocker.patch.multiple(build_manager, start=start, check_status=mocker.AsyncMock()) + mocker.patch.multiple(preset, wait_time=0, end_status="loop") + + response = await start_build(preset, background_tasks=BackgroundTasks(), build_manager=build_manager) + start.assert_awaited_once_with(preset) + assert response == {"status": "ok", "build_id": BUILD_ID} + + +@pytest.mark.asyncio +async def test_check_build_processes_some_info(mocker, pagination): + build_manager = mocker.MagicMock(spec=BuildManager()) + run_manager = mocker.MagicMock(spec=RunManager()) + + await check_build_processes(BUILD_ID, build_manager, run_manager, pagination) + + build_manager.get_build_info.assert_awaited_once_with(BUILD_ID, run_manager) + + +@pytest.mark.asyncio +async def test_check_build_processes_all_info(mocker, pagination): + build_id = None + build_manager = mocker.MagicMock(spec=BuildManager()) + run_manager = mocker.MagicMock(spec=RunManager()) + + await check_build_processes(build_id, build_manager, run_manager, pagination) + + build_manager.get_full_info_with_runs_info.assert_awaited_once_with( + run_manager, offset=pagination.offset(), limit=pagination.limit + ) + + +@pytest.mark.asyncio +async def test_get_build_logs(mocker, pagination): + build_manager = mocker.MagicMock(spec=BuildManager()) + + await get_build_logs(BUILD_ID, build_manager, pagination) + + build_manager.fetch_build_logs.assert_awaited_once_with(BUILD_ID, pagination.offset(), pagination.limit) + + +@pytest.mark.asyncio +async def test_start_run(mocker): + run_manager = mocker.MagicMock() + preset = mocker.MagicMock() + + start = mocker.AsyncMock(return_value=RUN_ID) + mocker.patch.multiple(run_manager, start=start, check_status=mocker.AsyncMock()) + mocker.patch.multiple(preset, wait_time=0, end_status="loop") + + response = await start_run( + build_id=BUILD_ID, preset=preset, background_tasks=BackgroundTasks(), run_manager=run_manager + ) + start.assert_awaited_once_with(BUILD_ID, preset) + assert response == {"status": "ok", "run_id": RUN_ID} + + +@pytest.mark.asyncio +async def 
test_check_run_processes_some_info(mocker, pagination): + run_manager = mocker.MagicMock(spec=RunManager()) + + await check_run_processes(RUN_ID, run_manager, pagination) + + run_manager.get_run_info.assert_awaited_once_with(RUN_ID) + + +@pytest.mark.asyncio +async def test_check_run_processes_all_info(mocker, pagination): + run_id = None + run_manager = mocker.MagicMock(spec=RunManager()) + + await check_run_processes(run_id, run_manager, pagination) + + run_manager.get_full_info.assert_awaited_once_with(offset=pagination.offset(), limit=pagination.limit) + + +@pytest.mark.asyncio +async def test_get_run_logs(mocker, pagination): + run_manager = mocker.MagicMock(spec=RunManager()) + + await get_run_logs(RUN_ID, run_manager, pagination) + + run_manager.fetch_run_logs.assert_awaited_once_with(RUN_ID, pagination.offset(), pagination.limit) + + +@pytest.mark.asyncio +async def test_connect(mocker): + websocket = mocker.MagicMock(spec=WebSocket) + websocket_manager = mocker.MagicMock(spec=WebSocketManager()) + run_manager = mocker.MagicMock(spec=RunManager()) + run_process = mocker.MagicMock(spec=RunProcess(RUN_ID)) + run_manager.processes = {RUN_ID: run_process} + mocker.patch.object(websocket, "query_params", {"run_id": str(RUN_ID)}) + + await connect(websocket, websocket_manager, run_manager) + + websocket_manager.connect.assert_awaited_once_with(websocket) + websocket_manager.send_process_output_to_websocket.assert_awaited_once_with(RUN_ID, run_manager, websocket) + websocket_manager.forward_websocket_messages_to_process.assert_awaited_once_with(RUN_ID, run_manager, websocket) + websocket_manager.disconnect.assert_called_once_with(websocket) diff --git a/backend/df_designer/app/tests/api/test_flows.py b/backend/df_designer/app/tests/api/test_flows.py new file mode 100644 index 00000000..54867bd3 --- /dev/null +++ b/backend/df_designer/app/tests/api/test_flows.py @@ -0,0 +1,40 @@ +# create test flows function here +import pytest +from omegaconf import OmegaConf + +from app.api.api_v1.endpoints.flows import flows_get, flows_post +from app.core.config import settings + + +@pytest.mark.asyncio +async def test_flows_get(mocker): + read_conf = mocker.patch("app.api.api_v1.endpoints.flows.read_conf", return_value=OmegaConf.create({"foo": "bar"})) + + bot_repo = mocker.MagicMock() + tag = mocker.MagicMock() + tag.name = "43" + bot_repo.tags = [tag] + mocker.patch("app.api.api_v1.endpoints.flows.Repo.init", return_value=bot_repo) + + response = await flows_get("43") + + read_conf.assert_called_with(settings.frontend_flows_path) + bot_repo.git.checkout.assert_called_once_with("43", settings.frontend_flows_path.name) + assert response["status"] == "ok" + assert response["data"] == {"foo": "bar"} + + +@pytest.mark.asyncio +async def test_flows_post(mocker): + write_conf = mocker.patch("app.api.api_v1.endpoints.flows.write_conf") + get_repo = mocker.patch("app.api.api_v1.endpoints.flows.get_repo") + commit_changes = mocker.patch("app.api.api_v1.endpoints.flows.commit_changes") + + response = await flows_post({"foo": "bar"}, "save1") + + write_conf.assert_called_with({"foo": "bar"}, settings.frontend_flows_path) + get_repo.assert_called_with(settings.frontend_flows_path.parent) + repo = get_repo.return_value + commit_changes.assert_called_with(repo, "Save frontend flows") + repo.create_tag.assert_called_with("save1") + assert response["status"] == "ok" diff --git a/backend/df_designer/app/tests/conftest.py b/backend/df_designer/app/tests/conftest.py new file mode 100644 index 00000000..7e316834 --- 
/dev/null +++ b/backend/df_designer/app/tests/conftest.py @@ -0,0 +1,79 @@ +from contextlib import asynccontextmanager +from typing import Generator + +import httpx +import pytest +from fastapi.testclient import TestClient +from httpx import AsyncClient + +from app.main import app +from app.schemas.pagination import Pagination +from app.schemas.preset import Preset +from app.services.process import RunProcess +from app.services.process_manager import BuildManager, RunManager +from app.services.websocket_manager import WebSocketManager + + +async def start_process(async_client: AsyncClient, endpoint, preset_end_status) -> httpx.Response: + return await async_client.post( + endpoint, + json={"wait_time": 0.1, "end_status": preset_end_status}, + ) + + +@asynccontextmanager +async def override_dependency(mocker_obj, get_manager_func): + process_manager = get_manager_func() + process_manager.check_status = mocker_obj.AsyncMock() + app.dependency_overrides[get_manager_func] = lambda: process_manager + try: + yield process_manager + finally: + for _, process in process_manager.processes.items(): + if process.process.returncode is None: + await process.stop() + app.dependency_overrides = {} + + +@pytest.fixture +def client() -> Generator: + with TestClient(app=app) as client: + yield client + + +@pytest.fixture(scope="session") +def preset() -> Preset: + return Preset( + wait_time=0, + end_status="loop", + ) + + +@pytest.fixture +def pagination() -> Pagination: + return Pagination() + + +@pytest.fixture() +def run_process(): + async def _run_process(cmd_to_run): + process = RunProcess(id_=0) + await process.start(cmd_to_run) + return process + + return _run_process + + +@pytest.fixture() +def run_manager(): + return RunManager() + + +@pytest.fixture() +def build_manager(): + return BuildManager() + + +@pytest.fixture +def websocket_manager(): + return WebSocketManager() diff --git a/backend/df_designer/app/tests/e2e/__init__.py b/backend/df_designer/app/tests/e2e/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/df_designer/app/tests/e2e/test_e2e.py b/backend/df_designer/app/tests/e2e/test_e2e.py new file mode 100644 index 00000000..82c69863 --- /dev/null +++ b/backend/df_designer/app/tests/e2e/test_e2e.py @@ -0,0 +1,63 @@ +import asyncio + +import httpx +import pytest +from httpx_ws import aconnect_ws +from httpx_ws.transport import ASGIWebSocketTransport + +from app.api.deps import get_build_manager, get_run_manager +from app.core.config import settings +from app.core.logger_config import get_logger +from app.main import app +from app.schemas.process_status import Status +from app.tests.conftest import override_dependency, start_process +from app.utils.git_cmd import delete_tag, get_repo + +logger = get_logger(__name__) + + +async def _assert_process_status(response, process_manager): + assert response.json().get("status") == "ok", "Start process response status is not 'ok'" + process_manager.check_status.assert_awaited_once() + + +@pytest.mark.asyncio +async def test_all(mocker): + async with httpx.AsyncClient(transport=ASGIWebSocketTransport(app)) as client: + async with override_dependency(mocker, get_build_manager) as process_manager: + mocker.patch("app.services.process_manager.max", return_value=9999) + response = await start_process( + client, + endpoint="http://localhost:8000/api/v1/bot/build/start", + preset_end_status="success", + ) + + build_id = process_manager.get_last_id() + + await _assert_process_status(response, process_manager) + try: + await 
asyncio.wait_for(process_manager.processes[build_id].process.wait(), timeout=20) + except asyncio.exceptions.TimeoutError as exc: + raise Exception( + "Process with expected end status Status.COMPLETED timed out with status Status.RUNNING." + ) from exc + assert await process_manager.get_status(build_id) == Status.COMPLETED + + async with override_dependency(mocker, get_run_manager) as process_manager: + response = await start_process( + client, + endpoint=f"http://localhost:8000/api/v1/bot/run/start/{build_id}", + preset_end_status="success", + ) + + run_id = process_manager.get_last_id() + + await _assert_process_status(response, process_manager) + await asyncio.sleep(10) + assert await process_manager.get_status(run_id) == Status.ALIVE + + async with aconnect_ws(f"http://localhost:8000/api/v1/bot/run/connect?run_id={run_id}", client) as ws: + message = await ws.receive_text() + assert message == "Start chatting" + repo = get_repo(settings.work_directory / "bot") + delete_tag(repo, "10000") diff --git a/backend/df_designer/app/tests/integration/__init__.py b/backend/df_designer/app/tests/integration/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/df_designer/app/tests/integration/test_api_integration.py b/backend/df_designer/app/tests/integration/test_api_integration.py new file mode 100644 index 00000000..5cc6ae5d --- /dev/null +++ b/backend/df_designer/app/tests/integration/test_api_integration.py @@ -0,0 +1,161 @@ +import asyncio + +import httpx +import pytest +from httpx import ASGITransport, AsyncClient +from httpx_ws import aconnect_ws +from httpx_ws.transport import ASGIWebSocketTransport + +from app.api.deps import get_build_manager, get_run_manager +from app.core.logger_config import get_logger +from app.main import app +from app.schemas.process_status import Status +from app.tests.conftest import override_dependency, start_process + +logger = get_logger(__name__) + + +async def _assert_process_status(response, process_manager, expected_end_status): + assert response.json().get("status") == "ok", "Start process response status is not 'ok'" + process_manager.check_status.assert_awaited_once() + + try: + await asyncio.wait_for( + process_manager.processes[process_manager.last_id].process.wait(), timeout=10 + ) # TODO: Consider making this timeout configurable + except asyncio.exceptions.TimeoutError as exc: + if expected_end_status in [Status.ALIVE, Status.RUNNING]: + logger.debug("Loop process timed out. Expected behavior.") + else: + raise Exception( + f"Process with expected end status '{expected_end_status}' timed out with status 'running'."
+ ) from exc + + process_id = process_manager.last_id + logger.debug("Process id is %s", process_id) + current_status = await process_manager.get_status(process_id) + assert ( + current_status == expected_end_status + ), f"Current process status '{current_status}' did not match the expected '{expected_end_status}'" + + return current_status + + +async def _test_start_process(mocker_obj, get_manager_func, endpoint, preset_end_status, expected_end_status): + async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as async_client: + async with override_dependency(mocker_obj, get_manager_func) as process_manager: + response = await start_process(async_client, endpoint, preset_end_status) + current_status = await _assert_process_status(response, process_manager, expected_end_status) + + if current_status == Status.RUNNING: + process_manager.processes[process_manager.last_id].process.terminate() + await process_manager.processes[process_manager.last_id].process.wait() + + +async def _test_stop_process(mocker, get_manager_func, start_endpoint, stop_endpoint): + async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as async_client: + async with override_dependency(mocker, get_manager_func) as manager: + start_response = await start_process(async_client, start_endpoint, preset_end_status="loop") + assert start_response.status_code == 201 + logger.debug("Processes: %s", manager.processes) + + last_id = manager.get_last_id() + logger.debug("Last id: %s, type: %s", last_id, type(last_id)) + logger.debug("Process status %s", await manager.get_status(last_id)) + + stop_response = await async_client.get(f"{stop_endpoint}/{last_id}") + assert stop_response.status_code == 200 + assert stop_response.json() == {"status": "ok"} + + +# Test flows endpoints and interaction with db (read and write conf) +def test_flows(client): # noqa: F811 + get_response = client.get("/api/v1/flows/43") + assert get_response.status_code == 200 + data = get_response.json()["data"] + assert "flows" in data + + response = client.post("/api/v1/flows/test_save1", json=data) + assert response.status_code == 200 + + +# def test_get_build_status(client): +# pass + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "end_status, process_status", [("failure", Status.FAILED), ("loop", Status.RUNNING), ("success", Status.COMPLETED)] +) +async def test_start_build(mocker, end_status, process_status): + await _test_start_process( + mocker, + get_build_manager, + endpoint="/api/v1/bot/build/start", + preset_end_status=end_status, + expected_end_status=process_status, + ) + + +@pytest.mark.asyncio +async def test_stop_build(mocker): + await _test_stop_process( + mocker, get_build_manager, start_endpoint="/api/v1/bot/build/start", stop_endpoint="/api/v1/bot/build/stop" + ) + + +# def test_get_run_status(client): +# pass + + +# Test processes of various end_status + Test integration with get_status. 
No db interaction (mocked processes) +@pytest.mark.asyncio +@pytest.mark.parametrize( + "end_status, process_status", [("failure", Status.FAILED), ("loop", Status.RUNNING), ("success", Status.ALIVE)] +) +async def test_start_run(mocker, end_status, process_status): + build_id = 43 + await _test_start_process( + mocker, + get_run_manager, + endpoint=f"/api/v1/bot/run/start/{build_id}", + preset_end_status=end_status, + expected_end_status=process_status, + ) + + +@pytest.mark.asyncio +async def test_stop_run(mocker): + build_id = 43 + await _test_stop_process( + mocker, + get_run_manager, + start_endpoint=f"/api/v1/bot/run/start/{build_id}", + stop_endpoint="/api/v1/bot/run/stop", + ) + + +@pytest.mark.asyncio +async def test_connect_to_ws(mocker): + build_id = 43 + + async with httpx.AsyncClient(transport=ASGIWebSocketTransport(app)) as client: + async with override_dependency(mocker, get_run_manager) as process_manager: + # Start a process + start_response = await start_process( + client, + endpoint=f"http://localhost:8000/api/v1/bot/run/start/{build_id}", + preset_end_status="success", + ) + assert start_response.status_code == 201 + process_manager.check_status.assert_awaited_once() + + run_id = process_manager.get_last_id() + logger.debug(f"run_id: {run_id}") + await asyncio.sleep(10) + + assert await process_manager.get_status(run_id) == Status.ALIVE + + async with aconnect_ws(f"http://localhost:8000/api/v1/bot/run/connect?run_id={run_id}", client) as ws: + message = await ws.receive_text() + assert message == "Start chatting" diff --git a/backend/df_designer/app/tests/services/__init__.py b/backend/df_designer/app/tests/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/df_designer/app/tests/services/test_process.py b/backend/df_designer/app/tests/services/test_process.py new file mode 100644 index 00000000..e6bdd273 --- /dev/null +++ b/backend/df_designer/app/tests/services/test_process.py @@ -0,0 +1,55 @@ +import asyncio + +import pytest + +from app.core.logger_config import get_logger +from app.schemas.process_status import Status + +logger = get_logger(__name__) + + +class TestRunProcess: + # def test_update_db_info(self, run_process): + # process = await run_process("echo 'Hello df_designer'") + # process.update_db_info() + + @pytest.mark.asyncio + @pytest.mark.parametrize( + "cmd_to_run, status", + [ + ("sleep 10000", Status.RUNNING), + ("false", Status.FAILED), + ("echo Hello df_designer", Status.COMPLETED), + ], + ) + async def test_check_status(self, run_process, cmd_to_run, status): + process = await run_process(cmd_to_run) + await asyncio.sleep(2) + assert await process.check_status() == status + + # def test_periodically_check_status(self, run_process): + # process = await run_process("sleep 10000") + # run_process.periodically_check_status() + + @pytest.mark.asyncio + async def test_stop(self, run_process): + process = await run_process("sleep 10000") + await process.stop() + assert process.process.returncode == -15 + + @pytest.mark.asyncio + async def test_read_stdout(self, run_process): + process = await run_process("echo Hello df_designer") + output = await process.read_stdout() + assert output.strip().decode() == "Hello df_designer" + + @pytest.mark.asyncio + async def test_write_stdout(self, run_process): + process = await run_process("cat") + await process.write_stdin(b"DF_Designer team welcome you.\n") + output = await process.process.stdout.readline() + assert output.decode().strip() == "DF_Designer team welcome you." 
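+
+# A minimal sketch (not part of this patch) of what the TestBuildProcess
+# placeholder below could grow into, mirroring TestRunProcess. It reuses the
+# existing `run_process` fixture; a dedicated `build_process` fixture would be
+# an assumption, so none is introduced here.
+#
+# class TestBuildProcess:
+#     @pytest.mark.asyncio
+#     async def test_check_status_completed(self, run_process):
+#         # a command that exits with code 0 should end up as Status.COMPLETED
+#         process = await run_process("echo build finished")
+#         await asyncio.sleep(2)
+#         assert await process.check_status() == Status.COMPLETED
+#
+#     @pytest.mark.asyncio
+#     async def test_stop(self, run_process):
+#         # terminating a long-running command should yield returncode -15
+#         process = await run_process("sleep 10000")
+#         await process.stop()
+#         assert process.process.returncode == -15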
+ + +# class TestBuildProcess: +# pass diff --git a/backend/df_designer/app/tests/services/test_process_manager.py b/backend/df_designer/app/tests/services/test_process_manager.py new file mode 100644 index 00000000..8dd5b9ae --- /dev/null +++ b/backend/df_designer/app/tests/services/test_process_manager.py @@ -0,0 +1,96 @@ +import pytest +from omegaconf import OmegaConf + +from app.core.logger_config import get_logger + +logger = get_logger(__name__) + +RUN_ID = 42 +BUILD_ID = 43 + + +class TestRunManager: + @pytest.mark.asyncio + async def test_start(self, mocker, preset, run_manager): # noqa: F811 + # Mock the RunProcess constructor wherever it's called in + # the process_manager file within the scope of this test function + run_process = mocker.patch("app.services.process_manager.RunProcess") + run_process_instance = run_process.return_value + run_process_instance.start = mocker.AsyncMock() + run_manager.get_full_info = mocker.AsyncMock(return_value=[{"id": RUN_ID}]) + + await run_manager.start(build_id=BUILD_ID, preset=preset) + + run_process.assert_called_once_with(run_manager.last_id, BUILD_ID, preset.end_status) + run_process_instance.start.assert_awaited_once_with(f"dflowd run_bot {BUILD_ID} --preset {preset.end_status}") + + assert run_manager.processes[run_manager.last_id] is run_process_instance + + @pytest.mark.asyncio + async def test_stop_success(self, mocker, run_manager): + run_manager.processes[RUN_ID] = mocker.MagicMock() + run_manager.processes[RUN_ID].stop = mocker.AsyncMock() + + await run_manager.stop(RUN_ID) + run_manager.processes[RUN_ID].stop.assert_awaited_once_with() + + @pytest.mark.asyncio + async def test_stop_with_error(self, run_manager): + with pytest.raises((RuntimeError, ProcessLookupError)): + await run_manager.stop(RUN_ID) + + # def test_check_status(self, run_manager, preset): + # pass + + @pytest.mark.asyncio + async def test_get_process_info(self, mocker, run_manager): + df_conf = OmegaConf.create( + f""" + - id: {RUN_ID} + status: stopped + """ + ) + df_conf_dict = { + "id": RUN_ID, + "status": "stopped", + } + + read_conf = mocker.patch("app.services.process_manager.read_conf") + read_conf.return_value = df_conf + + run_info = await run_manager.get_run_info(RUN_ID) + assert run_info == df_conf_dict + + @pytest.mark.asyncio + async def test_get_full_info(self, mocker, run_manager): + df_conf = OmegaConf.create( + f""" + - id: {RUN_ID} + status: stopped + - id: {RUN_ID + 1} + status: stopped + """ + ) + df_conf_dict = { + "id": RUN_ID, + "status": "stopped", + } + + read_conf = mocker.patch("app.services.process_manager.read_conf") + read_conf.return_value = df_conf + + run_info = await run_manager.get_full_info(0, 1) + assert run_info == [df_conf_dict] + + @pytest.mark.asyncio + async def test_fetch_run_logs(self, mocker, run_manager): + LOG_PATH = "df_designer/logs/runs/20240425/42_211545.log" + run_manager.get_process_info = mocker.AsyncMock(return_value={"id": RUN_ID, "log_path": LOG_PATH}) + + read_logs = mocker.patch("app.services.process_manager.read_logs", return_value=["log1", "log2"]) + + logs = await run_manager.fetch_run_logs(RUN_ID, 0, 1) + + run_manager.get_process_info.assert_awaited_once() + read_logs.assert_awaited_once_with(LOG_PATH) + assert logs == ["log1"] diff --git a/backend/df_designer/app/tests/services/test_websocket_manager.py b/backend/df_designer/app/tests/services/test_websocket_manager.py new file mode 100644 index 00000000..854b232c --- /dev/null +++ b/backend/df_designer/app/tests/services/test_websocket_manager.py
@@ -0,0 +1,60 @@ +import pytest +from fastapi import WebSocket + +from app.services.process import RunProcess +from app.services.process_manager import RunManager + + +class TestWebSocketManager: + @pytest.mark.asyncio + async def test_connect(self, mocker, websocket_manager): + mocked_websocket = mocker.MagicMock(spec=WebSocket) + + await websocket_manager.connect(mocked_websocket) + + mocked_websocket.accept.assert_awaited_once_with() + assert mocked_websocket in websocket_manager.active_connections + + @pytest.mark.asyncio + async def test_disconnect(self, mocker, websocket_manager): + mocked_websocket = mocker.MagicMock(spec=WebSocket) + websocket_manager.active_connections.append(mocked_websocket) + websocket_manager.pending_tasks[mocked_websocket] = set() + + websocket_manager.disconnect(mocked_websocket) + + assert mocked_websocket not in websocket_manager.pending_tasks + assert mocked_websocket not in websocket_manager.active_connections + + @pytest.mark.asyncio + async def test_send_process_output_to_websocket(self, mocker, websocket_manager): + run_id = 42 + awaited_response = "Hello from DF-Designer" + + websocket = mocker.MagicMock(spec=WebSocket) + run_manager = mocker.MagicMock(spec=RunManager()) + run_process = mocker.MagicMock(spec=RunProcess(run_id)) + run_process.read_stdout = mocker.AsyncMock(side_effect=[awaited_response.encode(), None]) + run_manager.processes = {run_id: run_process} + + await websocket_manager.send_process_output_to_websocket(run_id, run_manager, websocket) + + assert run_process.read_stdout.call_count == 2 + websocket.send_text.assert_awaited_once_with(awaited_response) + + @pytest.mark.asyncio + async def test_forward_websocket_messages_to_process(self, mocker, websocket_manager): + run_id = 42 + awaited_message = "Hello from DF-Designer" + + websocket = mocker.MagicMock(spec=WebSocket) + websocket.receive_text = mocker.AsyncMock(side_effect=[awaited_message, None]) + run_manager = mocker.MagicMock(spec=RunManager()) + run_process = mocker.MagicMock(spec=RunProcess(run_id)) + run_process.write_stdin = mocker.AsyncMock() + run_manager.processes = {run_id: run_process} + + await websocket_manager.forward_websocket_messages_to_process(run_id, run_manager, websocket) + + assert websocket.receive_text.await_count == 2 + run_process.write_stdin.assert_called_once_with(awaited_message.encode() + b"\n") diff --git a/backend/df_designer/app/utils/__init__.py b/backend/df_designer/app/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/df_designer/app/utils/git_cmd.py b/backend/df_designer/app/utils/git_cmd.py new file mode 100644 index 00000000..f203a561 --- /dev/null +++ b/backend/df_designer/app/utils/git_cmd.py @@ -0,0 +1,18 @@ +from pathlib import Path + +from git import Repo + + +def commit_changes(repo, commit_message): + repo.git.add(A=True) + repo.index.commit(commit_message) + + +def get_repo(project_dir: Path): + repo = Repo(project_dir) + assert not repo.bare + return repo + + +def delete_tag(repo: Repo, tag_name: str): + repo.git.tag("-d", tag_name) diff --git a/backend/df_designer/poetry.lock b/backend/df_designer/poetry.lock index a4ecc2d0..f89a04f2 100644 --- a/backend/df_designer/poetry.lock +++ b/backend/df_designer/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "aiofiles" @@ -87,6 +87,40 @@ files = [ [package.dependencies] chardet = ">=3.0.2" +[[package]] +name = "black" +version = "22.12.0" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.7" +files = [ + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + [[package]] name = "certifi" version = "2024.2.2" @@ -256,12 +290,14 @@ rich = "*" [[package]] name = "dff" -version = "0.6.4" +version = "0.6.4.dev0" description = "Dialog Flow Framework is a free and open-source software stack for creating chatbots, released under the terms of Apache License 2.0." 
optional = false -python-versions = "^3.8.1,!=3.9.7" -files = [] -develop = false +python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" +files = [ + {file = "dff-0.6.4.dev0-py3-none-any.whl", hash = "sha256:2bf1375bdde25492f623995bb148773d5f99b0173d547f9f7a47aa351d2a6302"}, + {file = "dff-0.6.4.dev0.tar.gz", hash = "sha256:06a44f7e43e137208bc02615e95492526afc20a9078688d5be34fdb01934bd64"}, +] [package.dependencies] colorama = "*" @@ -283,12 +319,6 @@ stats = ["omegaconf", "opentelemetry-exporter-otlp (>=1.20.0)", "opentelemetry-i telegram = ["pytelegrambotapi"] ydb = ["six", "ydb"] -[package.source] -type = "git" -url = "https://github.com/deeppavlov/dialog_flow_framework.git" -reference = "4b4cf69dd96e1ed21ec3bf9d5950c010616b24cc" -resolved_reference = "4b4cf69dd96e1ed21ec3bf9d5950c010616b24cc" - [[package]] name = "exceptiongroup" version = "1.2.0" @@ -305,23 +335,71 @@ test = ["pytest (>=6)"] [[package]] name = "fastapi" -version = "0.110.0" +version = "0.110.1" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.110.0-py3-none-any.whl", hash = "sha256:87a1f6fb632a218222c5984be540055346a8f5d8a68e8f6fb647b1dc9934de4b"}, - {file = "fastapi-0.110.0.tar.gz", hash = "sha256:266775f0dcc95af9d3ef39bad55cff525329a931d5fd51930aadd4f428bf7ff3"}, + {file = "fastapi-0.110.1-py3-none-any.whl", hash = "sha256:5df913203c482f820d31f48e635e022f8cbfe7350e4830ef05a3163925b1addc"}, + {file = "fastapi-0.110.1.tar.gz", hash = "sha256:6feac43ec359dfe4f45b2c18ec8c94edb8dc2dfc461d417d9e626590c071baad"}, ] [package.dependencies] pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.36.3,<0.37.0" +starlette = ">=0.37.2,<0.38.0" typing-extensions = ">=4.8.0" [package.extras] all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +[[package]] +name = "flake8" +version = "4.0.1" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.6" +files = [ + {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, + {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, +] + +[package.dependencies] +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.8.0,<2.9.0" +pyflakes = ">=2.4.0,<2.5.0" + +[[package]] +name = "gitdb" +version = "4.0.11" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.43" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.43-py3-none-any.whl", hash = 
"sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"}, + {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] +test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] + [[package]] name = "h11" version = "0.14.0" @@ -333,6 +411,27 @@ files = [ {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + [[package]] name = "httptools" version = "0.6.1" @@ -381,17 +480,83 @@ files = [ [package.extras] test = ["Cython (>=0.29.24,<0.30.0)"] +[[package]] +name = "httpx" +version = "0.27.0" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "httpx-ws" +version = "0.6.0" +description = "WebSockets support for HTTPX" +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx_ws-0.6.0-py3-none-any.whl", hash = "sha256:437cfca94519a4e6ae06eb5573192df6c0da85c22b1a19cc1ea0b02b05a51d25"}, + {file = "httpx_ws-0.6.0.tar.gz", hash = "sha256:60218f531fb474a2143af38568f4b7d94ba356780973443365c8e2c87882bb8c"}, +] + +[package.dependencies] +anyio = ">=4" +httpcore = ">=1.0.4" +httpx = ">=0.23.1" +wsproto = "*" + [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "iniconfig" +version = 
"2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + [[package]] name = "jinja2" version = "3.1.3" @@ -502,6 +667,17 @@ files = [ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = "*" +files = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] + [[package]] name = "mdurl" version = "0.1.2" @@ -513,6 +689,17 @@ files = [ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + [[package]] name = "nest-asyncio" version = "1.6.0" @@ -539,20 +726,83 @@ files = [ antlr4-python3-runtime = "==4.9.*" PyYAML = ">=5.1.0" +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pycodestyle" +version = "2.8.0" +description = "Python style guide checker" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, + {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, +] + [[package]] name = "pydantic" -version = "2.6.3" +version = "2.7.0" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.3-py3-none-any.whl", hash = "sha256:72c6034df47f46ccdf81869fddb81aade68056003900a8724a4f160700016a2a"}, - {file = "pydantic-2.6.3.tar.gz", hash = "sha256:e07805c4c7f5c6826e33a1d4c9d47950d7eaf34868e2690f8594d2e30241f11f"}, + {file = "pydantic-2.7.0-py3-none-any.whl", hash = "sha256:9dee74a271705f14f9a1567671d144a851c675b072736f0a7b2608fd9e495352"}, + {file = "pydantic-2.7.0.tar.gz", hash = "sha256:b5ecdd42262ca2462e2624793551e80911a1e989f462910bb81aef974b4bb383"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.16.3" +pydantic-core = "2.18.1" typing-extensions = ">=4.6.1" [package.extras] @@ -560,90 +810,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.16.3" -description = "" +version = "2.18.1" +description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, - {file = 
"pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, - {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, - {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, - {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, - {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, - {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, - {file = 
"pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, - {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, - {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, - {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, - {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, - 
{file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, - {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, - {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, - 
{file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, - {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, + {file = "pydantic_core-2.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ee9cf33e7fe14243f5ca6977658eb7d1042caaa66847daacbd2117adb258b226"}, + {file = "pydantic_core-2.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6b7bbb97d82659ac8b37450c60ff2e9f97e4eb0f8a8a3645a5568b9334b08b50"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df4249b579e75094f7e9bb4bd28231acf55e308bf686b952f43100a5a0be394c"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d0491006a6ad20507aec2be72e7831a42efc93193d2402018007ff827dc62926"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ae80f72bb7a3e397ab37b53a2b49c62cc5496412e71bc4f1277620a7ce3f52b"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58aca931bef83217fca7a390e0486ae327c4af9c3e941adb75f8772f8eeb03a1"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1be91ad664fc9245404a789d60cba1e91c26b1454ba136d2a1bf0c2ac0c0505a"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:667880321e916a8920ef49f5d50e7983792cf59f3b6079f3c9dac2b88a311d17"}, + {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f7054fdc556f5421f01e39cbb767d5ec5c1139ea98c3e5b350e02e62201740c7"}, + {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:030e4f9516f9947f38179249778709a460a3adb516bf39b5eb9066fcfe43d0e6"}, + {file = "pydantic_core-2.18.1-cp310-none-win32.whl", hash = "sha256:2e91711e36e229978d92642bfc3546333a9127ecebb3f2761372e096395fc649"}, + {file = "pydantic_core-2.18.1-cp310-none-win_amd64.whl", hash = "sha256:9a29726f91c6cb390b3c2338f0df5cd3e216ad7a938762d11c994bb37552edb0"}, + {file = "pydantic_core-2.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9ece8a49696669d483d206b4474c367852c44815fca23ac4e48b72b339807f80"}, + {file = "pydantic_core-2.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a5d83efc109ceddb99abd2c1316298ced2adb4570410defe766851a804fcd5b"}, + {file = 
"pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7973c381283783cd1043a8c8f61ea5ce7a3a58b0369f0ee0ee975eaf2f2a1b"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:54c7375c62190a7845091f521add19b0f026bcf6ae674bdb89f296972272e86d"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd63cec4e26e790b70544ae5cc48d11b515b09e05fdd5eff12e3195f54b8a586"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:561cf62c8a3498406495cfc49eee086ed2bb186d08bcc65812b75fda42c38294"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68717c38a68e37af87c4da20e08f3e27d7e4212e99e96c3d875fbf3f4812abfc"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d5728e93d28a3c63ee513d9ffbac9c5989de8c76e049dbcb5bfe4b923a9739d"}, + {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0f17814c505f07806e22b28856c59ac80cee7dd0fbb152aed273e116378f519"}, + {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d816f44a51ba5175394bc6c7879ca0bd2be560b2c9e9f3411ef3a4cbe644c2e9"}, + {file = "pydantic_core-2.18.1-cp311-none-win32.whl", hash = "sha256:09f03dfc0ef8c22622eaa8608caa4a1e189cfb83ce847045eca34f690895eccb"}, + {file = "pydantic_core-2.18.1-cp311-none-win_amd64.whl", hash = "sha256:27f1009dc292f3b7ca77feb3571c537276b9aad5dd4efb471ac88a8bd09024e9"}, + {file = "pydantic_core-2.18.1-cp311-none-win_arm64.whl", hash = "sha256:48dd883db92e92519201f2b01cafa881e5f7125666141a49ffba8b9facc072b0"}, + {file = "pydantic_core-2.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b6b0e4912030c6f28bcb72b9ebe4989d6dc2eebcd2a9cdc35fefc38052dd4fe8"}, + {file = "pydantic_core-2.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3202a429fe825b699c57892d4371c74cc3456d8d71b7f35d6028c96dfecad31"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3982b0a32d0a88b3907e4b0dc36809fda477f0757c59a505d4e9b455f384b8b"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25595ac311f20e5324d1941909b0d12933f1fd2171075fcff763e90f43e92a0d"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14fe73881cf8e4cbdaded8ca0aa671635b597e42447fec7060d0868b52d074e6"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca976884ce34070799e4dfc6fbd68cb1d181db1eefe4a3a94798ddfb34b8867f"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684d840d2c9ec5de9cb397fcb3f36d5ebb6fa0d94734f9886032dd796c1ead06"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:54764c083bbe0264f0f746cefcded6cb08fbbaaf1ad1d78fb8a4c30cff999a90"}, + {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:201713f2f462e5c015b343e86e68bd8a530a4f76609b33d8f0ec65d2b921712a"}, + {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd1a9edb9dd9d79fbeac1ea1f9a8dd527a6113b18d2e9bcc0d541d308dae639b"}, + {file = "pydantic_core-2.18.1-cp312-none-win32.whl", hash = 
"sha256:d5e6b7155b8197b329dc787356cfd2684c9d6a6b1a197f6bbf45f5555a98d411"}, + {file = "pydantic_core-2.18.1-cp312-none-win_amd64.whl", hash = "sha256:9376d83d686ec62e8b19c0ac3bf8d28d8a5981d0df290196fb6ef24d8a26f0d6"}, + {file = "pydantic_core-2.18.1-cp312-none-win_arm64.whl", hash = "sha256:c562b49c96906b4029b5685075fe1ebd3b5cc2601dfa0b9e16c2c09d6cbce048"}, + {file = "pydantic_core-2.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:3e352f0191d99fe617371096845070dee295444979efb8f27ad941227de6ad09"}, + {file = "pydantic_core-2.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0295d52b012cbe0d3059b1dba99159c3be55e632aae1999ab74ae2bd86a33d7"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56823a92075780582d1ffd4489a2e61d56fd3ebb4b40b713d63f96dd92d28144"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd3f79e17b56741b5177bcc36307750d50ea0698df6aa82f69c7db32d968c1c2"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38a5024de321d672a132b1834a66eeb7931959c59964b777e8f32dbe9523f6b1"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2ce426ee691319d4767748c8e0895cfc56593d725594e415f274059bcf3cb76"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2adaeea59849ec0939af5c5d476935f2bab4b7f0335b0110f0f069a41024278e"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9b6431559676a1079eac0f52d6d0721fb8e3c5ba43c37bc537c8c83724031feb"}, + {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:85233abb44bc18d16e72dc05bf13848a36f363f83757541f1a97db2f8d58cfd9"}, + {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:641a018af4fe48be57a2b3d7a1f0f5dbca07c1d00951d3d7463f0ac9dac66622"}, + {file = "pydantic_core-2.18.1-cp38-none-win32.whl", hash = "sha256:63d7523cd95d2fde0d28dc42968ac731b5bb1e516cc56b93a50ab293f4daeaad"}, + {file = "pydantic_core-2.18.1-cp38-none-win_amd64.whl", hash = "sha256:907a4d7720abfcb1c81619863efd47c8a85d26a257a2dbebdb87c3b847df0278"}, + {file = "pydantic_core-2.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:aad17e462f42ddbef5984d70c40bfc4146c322a2da79715932cd8976317054de"}, + {file = "pydantic_core-2.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:94b9769ba435b598b547c762184bcfc4783d0d4c7771b04a3b45775c3589ca44"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80e0e57cc704a52fb1b48f16d5b2c8818da087dbee6f98d9bf19546930dc64b5"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76b86e24039c35280ceee6dce7e62945eb93a5175d43689ba98360ab31eebc4a"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a05db5013ec0ca4a32cc6433f53faa2a014ec364031408540ba858c2172bb0"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:250ae39445cb5475e483a36b1061af1bc233de3e9ad0f4f76a71b66231b07f88"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a32204489259786a923e02990249c65b0f17235073149d0033efcebe80095570"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:6395a4435fa26519fd96fdccb77e9d00ddae9dd6c742309bd0b5610609ad7fb2"}, + {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2533ad2883f001efa72f3d0e733fb846710c3af6dcdd544fe5bf14fa5fe2d7db"}, + {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b560b72ed4816aee52783c66854d96157fd8175631f01ef58e894cc57c84f0f6"}, + {file = "pydantic_core-2.18.1-cp39-none-win32.whl", hash = "sha256:582cf2cead97c9e382a7f4d3b744cf0ef1a6e815e44d3aa81af3ad98762f5a9b"}, + {file = "pydantic_core-2.18.1-cp39-none-win_amd64.whl", hash = "sha256:ca71d501629d1fa50ea7fa3b08ba884fe10cefc559f5c6c8dfe9036c16e8ae89"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e178e5b66a06ec5bf51668ec0d4ac8cfb2bdcb553b2c207d58148340efd00143"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:72722ce529a76a4637a60be18bd789d8fb871e84472490ed7ddff62d5fed620d"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fe0c1ce5b129455e43f941f7a46f61f3d3861e571f2905d55cdbb8b5c6f5e2c"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4284c621f06a72ce2cb55f74ea3150113d926a6eb78ab38340c08f770eb9b4d"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a0c3e718f4e064efde68092d9d974e39572c14e56726ecfaeebbe6544521f47"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2027493cc44c23b598cfaf200936110433d9caa84e2c6cf487a83999638a96ac"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:76909849d1a6bffa5a07742294f3fa1d357dc917cb1fe7b470afbc3a7579d539"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ee7ccc7fb7e921d767f853b47814c3048c7de536663e82fbc37f5eb0d532224b"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee2794111c188548a4547eccc73a6a8527fe2af6cf25e1a4ebda2fd01cdd2e60"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a139fe9f298dc097349fb4f28c8b81cc7a202dbfba66af0e14be5cfca4ef7ce5"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d074b07a10c391fc5bbdcb37b2f16f20fcd9e51e10d01652ab298c0d07908ee2"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c69567ddbac186e8c0aadc1f324a60a564cfe25e43ef2ce81bcc4b8c3abffbae"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:baf1c7b78cddb5af00971ad5294a4583188bda1495b13760d9f03c9483bb6203"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2684a94fdfd1b146ff10689c6e4e815f6a01141781c493b97342cdc5b06f4d5d"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:73c1bc8a86a5c9e8721a088df234265317692d0b5cd9e86e975ce3bc3db62a59"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e60defc3c15defb70bb38dd605ff7e0fae5f6c9c7cbfe0ad7868582cb7e844a6"}, + {file = "pydantic_core-2.18.1.tar.gz", hash = "sha256:de9d3e8717560eb05e28739d1b35e4eac2e458553a52a301e51352a7ffc86a35"}, ] [package.dependencies] @@ -668,6 +918,17 @@ python-dotenv = ">=0.21.0" toml = ["tomli (>=2.0.1)"] yaml = ["pyyaml 
(>=6.0.1)"] +[[package]] +name = "pyflakes" +version = "2.4.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, + {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, +] + [[package]] name = "pygments" version = "2.17.2" @@ -683,6 +944,63 @@ files = [ plugins = ["importlib-metadata"] windows-terminal = ["colorama (>=0.4.6)"] +[[package]] +name = "pytest" +version = "8.1.1" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.23.6" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-asyncio-0.23.6.tar.gz", hash = "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"}, + {file = "pytest_asyncio-0.23.6-py3-none-any.whl", hash = "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -838,6 +1156,17 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "smmap" +version = "5.0.1" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.7" +files = [ + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, +] + [[package]] name = "sniffio" version = "1.3.1" @@ -851,13 +1180,13 @@ files = [ [[package]] name = "starlette" -version = "0.36.3" +version = "0.37.2" description = "The little ASGI library that shines." 
optional = false python-versions = ">=3.8" files = [ - {file = "starlette-0.36.3-py3-none-any.whl", hash = "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044"}, - {file = "starlette-0.36.3.tar.gz", hash = "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080"}, + {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, + {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, ] [package.dependencies] @@ -877,15 +1206,26 @@ files = [ {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, ] +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + [[package]] name = "typer" -version = "0.9.0" +version = "0.9.4" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." optional = false python-versions = ">=3.6" files = [ - {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"}, - {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, + {file = "typer-0.9.4-py3-none-any.whl", hash = "sha256:aa6c4a4e2329d868b80ecbaf16f807f2b54e192209d7ac9dd42691d63f7a54eb"}, + {file = "typer-0.9.4.tar.gz", hash = "sha256:f714c2d90afae3a7929fcd72a3abb08df305e1ff61719381384211c4070af57f"}, ] [package.dependencies] @@ -896,7 +1236,7 @@ typing-extensions = ">=3.7.4.3" all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] -test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.971)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] [[package]] name = "types-python-dateutil" @@ -911,13 +1251,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = 
"typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] [[package]] @@ -939,13 +1279,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.28.0" +version = "0.28.1" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.28.0-py3-none-any.whl", hash = "sha256:6623abbbe6176204a4226e67607b4d52cc60ff62cda0ff177613645cefa2ece1"}, - {file = "uvicorn-0.28.0.tar.gz", hash = "sha256:cab4473b5d1eaeb5a0f6375ac4bc85007ffc75c3cc1768816d9e5d589857b067"}, + {file = "uvicorn-0.28.1-py3-none-any.whl", hash = "sha256:5162f6d652f545be91b1feeaee8180774af143965ca9dc8a47ff1dc6bafa4ad5"}, + {file = "uvicorn-0.28.1.tar.gz", hash = "sha256:08103e79d546b6cf20f67c7e5e434d2cf500a6e29b28773e407250c54fc4fa3c"}, ] [package.dependencies] @@ -1254,7 +1594,21 @@ files = [ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] +[[package]] +name = "wsproto" +version = "1.2.0" +description = "WebSockets state-machine based protocol implementation" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, + {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, +] + +[package.dependencies] +h11 = ">=0.9.0,<1" + [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "c38c78d758710b2271c18b10a032af520ac530928bf102b4dc86907820f63b08" +content-hash = "ffb8f821b686b31e33b85141af24b10e893419ee40bc86c834596800351a810c" diff --git a/backend/df_designer/pyproject.toml b/backend/df_designer/pyproject.toml index 705df010..55988888 100644 --- a/backend/df_designer/pyproject.toml +++ b/backend/df_designer/pyproject.toml @@ -1,8 +1,13 @@ [tool.poetry] -name = "df_designer" -version = "0.1.0" -description = "" -authors = ["Ramimashkouk "] +name = "dflowd" +version = "0.1.0-beta.0" +description = "Dialog Flow Designer" +license = "Apache-2.0" +authors = [ + "Denis Kuznetsov ", + "Maks Rogatkin ", + "Rami Mashkouk ", +] readme = "README.md" packages = [{include = "app"}] @@ -15,8 +20,22 @@ typer = "^0.9.0" pydantic-settings = "^2.2.1" aiofiles = "^23.2.1" cookiecutter = "^2.6.0" -dff = { git = "https://github.com/deeppavlov/dialog_flow_framework.git", rev = "4b4cf69dd96e1ed21ec3bf9d5950c010616b24cc" } +dff = "==0.6.4.dev0" omegaconf = "^2.3.0" +pytest = "^8.1.1" +pytest-asyncio = "^0.23.6" +pytest-mock = "^3.14.0" +httpx = "^0.27.0" +httpx-ws = "^0.6.0" +gitpython = "^3.1.43" [tool.poetry.scripts] dflowd = "app.cli:cli" + +[tool.poetry.group.lint] +optional = true + +[tool.poetry.group.lint.dependencies] +isort = "^5" +black = "^22" +flake8 = "^4" diff --git a/compose.yaml b/compose.yaml index eaf18aa7..9a4b6c76 100644 --- a/compose.yaml +++ b/compose.yaml @@ -1,12 +1,15 @@ +volumes: + project_data: + services: backend: build: args: - PROJECT_DIR: brand_new + PROJECT_DIR: df_designer_project context: ./ dockerfile: Dockerfile ports: - 8000:8000 volumes: - - ./volume:/src/ + - project_data:/src2/df_designer_project version: '3.8' diff --git a/cypress.config.ts b/cypress.config.ts new file mode 100644 index 00000000..8959a4f2 --- /dev/null +++ b/cypress.config.ts @@ -0,0 +1,7 @@ +export default { + e2e: { + setupNodeEvents(on, config) { + // implement node event listeners here + }, + }, +}; diff --git a/frontend/bun.lockb 
b/frontend/bun.lockb index 1325b5fb..c6655d44 100755 Binary files a/frontend/bun.lockb and b/frontend/bun.lockb differ diff --git a/frontend/cypress.config.ts b/frontend/cypress.config.ts new file mode 100644 index 00000000..17161e32 --- /dev/null +++ b/frontend/cypress.config.ts @@ -0,0 +1,9 @@ +import { defineConfig } from "cypress"; + +export default defineConfig({ + e2e: { + setupNodeEvents(on, config) { + // implement node event listeners here + }, + }, +}); diff --git a/frontend/cypress/e2e/spec.cy.ts b/frontend/cypress/e2e/spec.cy.ts new file mode 100644 index 00000000..762f70b5 --- /dev/null +++ b/frontend/cypress/e2e/spec.cy.ts @@ -0,0 +1,78 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +describe("check run and edit flow", () => { + it("add run edit flow", () => { + cy.visit("http://localhost:5173/app/home") + cy.location("pathname").should("eq", "/app/home") + cy.get("[data-testid=create-flow-btn]").should("exist") + cy.get("[data-testid=create-flow-btn]").click({ force: true }) + cy.get("[data-testid=flow-name-input]").should("exist") + cy.get("[data-testid=flow-name-input]").click({ force: true }).type("flow1") + cy.get("[data-testid=flow-color-FFCC00]").should("exist") + cy.get("[data-testid=flow-color-FFCC00]").click({ force: true }) + cy.wait(100) + cy.get("[data-testid=flow-save-btn]").should("exist") + cy.get("[data-testid=flow-save-btn]").click({ force: true }) + cy.get("[data-testid=flow1-edit-btn]").should("exist") + cy.get("[data-testid=flow1-edit-btn]").click({ force: true }) + cy.location("pathname").should("eq", "/app/flow/flow1") + cy.get("[data-testid=flow-page]").should("exist") + cy.get("[data-testid=nodes-collapse-btn]").click({ force: true }) + cy.wait(500) + cy.get("[data-testid=default_node-item]").should("exist") + let nodes: any[] = [] + cy.request("http://localhost:8000/api/v1/flows").then((response) => { + nodes = response.body.data.flows.find((flow) => flow.name === "flow1").data.nodes + console.log(nodes) + }) + const dataTransfer = new DataTransfer() + cy.get("[data-testid=default_node-item]").trigger("dragstart", { dataTransfer }) + cy.get("[data-testid=rf__wrapper]").trigger("drop", { dataTransfer }) + cy.get("[data-testid=defaultnode-add-condition-btn]").should("exist") + cy.get("[data-testid=defaultnode-add-condition-btn]").click({ force: true }) + cy.wait(300) + cy.get("[data-testid=python-condition-editor]").should("exist") + cy.get("[data-testid=python-condition-editor]").type('if False: print("hello")') + cy.get("[data-testid=save-condition-button]").should("exist").click({ force: true }) + cy.get("[data-testid=rf__wrapper]").trigger("keydown", { ctrlKey: true, key: "s" }) + cy.wait(500) + cy.get("[data-testid=build-btn]").should("exist") + cy.get("[data-testid=build-btn]").click() + cy.wait(4000) + cy.get("[data-testid=build-menu-open-btn]").should("exist") + cy.get("[data-testid=build-menu-open-btn]").click() + cy.wait(300) + cy.get("[data-testid=run-btn]").should("exist") + cy.get("[data-testid=run-btn]").click() + cy.wait(1000) + cy.get("[data-testid=chat-btn]").should("exist") + cy.get("[data-testid=chat-btn]").click() + cy.get("[data-testid=chat-input]").should("exist") + cy.get("[data-testid=chat-input]").click() + cy.get("[data-testid=chat-input]").type("hello") + cy.get("[data-testid=chat-send]").should("exist") + cy.get("[data-testid=chat-send]").click() + cy.wait(300) + cy.get("[data-testid=bot-message]").should("exist") + cy.wrap(null).then(() => { + let currNodes: any[] = [] + 
cy.request("http://localhost:8000/api/v1/flows").then((response) => { + currNodes = response.body.data.flows.find((flow) => flow.name === "flow1").data.nodes + expect(currNodes.length - 1).to.eq(nodes.length) + const node = cy.get(`#${currNodes[currNodes.length - 1].id}`) + node.should("exist") + node.click({ force: true }) + const deletebtn = cy.get("[data-testid=header-button-delete-node]") + deletebtn.should("exist") + deletebtn.click() + cy.request("http://localhost:8000/api/v1/flows").then((response) => { + currNodes = response.body.data.flows.find((flow) => flow.name === "flow1").data.nodes + expect(currNodes.length).to.eq(nodes.length) + }) + cy.visit("http://localhost:5173/app/home") + cy.get("[data-testid=flow1-delete-btn]").should("exist") + cy.get("[data-testid=flow1-delete-btn]").click({ force: true }) + cy.get("[data-testid=flow1-edit-btn]").should("not.exist") + }) + }) + }) +}) diff --git a/frontend/cypress/fixtures/example.json b/frontend/cypress/fixtures/example.json new file mode 100644 index 00000000..02e42543 --- /dev/null +++ b/frontend/cypress/fixtures/example.json @@ -0,0 +1,5 @@ +{ + "name": "Using fixtures to represent data", + "email": "hello@cypress.io", + "body": "Fixtures are a great way to mock data for responses to routes" +} diff --git a/frontend/cypress/support/commands.ts b/frontend/cypress/support/commands.ts new file mode 100644 index 00000000..698b01a4 --- /dev/null +++ b/frontend/cypress/support/commands.ts @@ -0,0 +1,37 @@ +/// +// *********************************************** +// This example commands.ts shows you how to +// create various custom commands and overwrite +// existing commands. +// +// For more comprehensive examples of custom +// commands please read more here: +// https://on.cypress.io/custom-commands +// *********************************************** +// +// +// -- This is a parent command -- +// Cypress.Commands.add('login', (email, password) => { ... }) +// +// +// -- This is a child command -- +// Cypress.Commands.add('drag', { prevSubject: 'element'}, (subject, options) => { ... }) +// +// +// -- This is a dual command -- +// Cypress.Commands.add('dismiss', { prevSubject: 'optional'}, (subject, options) => { ... }) +// +// +// -- This will overwrite an existing command -- +// Cypress.Commands.overwrite('visit', (originalFn, url, options) => { ... }) +// +// declare global { +// namespace Cypress { +// interface Chainable { +// login(email: string, password: string): Chainable +// drag(subject: string, options?: Partial): Chainable +// dismiss(subject: string, options?: Partial): Chainable +// visit(originalFn: CommandOriginalFn, url: string, options: Partial): Chainable +// } +// } +// } \ No newline at end of file diff --git a/frontend/cypress/support/e2e.ts b/frontend/cypress/support/e2e.ts new file mode 100644 index 00000000..f80f74f8 --- /dev/null +++ b/frontend/cypress/support/e2e.ts @@ -0,0 +1,20 @@ +// *********************************************************** +// This example support/e2e.ts is processed and +// loaded automatically before your test files. +// +// This is a great place to put global configuration and +// behavior that modifies Cypress. +// +// You can change the location of this file or turn off +// automatically serving support files with the +// 'supportFile' configuration option. 
+// +// You can read more here: +// https://on.cypress.io/configuration +// *********************************************************** + +// Import commands.js using ES2015 syntax: +import './commands' + +// Alternatively you can use CommonJS syntax: +// require('./commands') \ No newline at end of file diff --git a/frontend/package.json b/frontend/package.json index 958c2e4c..cfcd8f04 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -7,20 +7,49 @@ "dev": "vite", "build": "tsc && vite build", "lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0", - "preview": "vite preview" + "preview": "vite preview", + "test": "jest", + "e2e": "cypress run", + "e2e:headed": "cypress run --headed", + "e2e:chrome": "cypress run --browser chrome" + }, + "jest": { + "transform": { + "^.+\\.tsx?$": "ts-jest" + }, + "moduleNameMapper": { + "\\.(css|less)$": "identity-obj-proxy" + }, + "testEnvironment": "jsdom" }, "dependencies": { + "@babel/preset-env": "^7.24.5", + "@babel/preset-react": "^7.24.1", + "@babel/preset-typescript": "^7.24.1", + "@codemirror/lang-python": "^6.1.5", + "@jest/globals": "^29.7.0", "@nextui-org/react": "^2.2.9", "@radix-ui/react-context-menu": "^2.1.5", "@react-spring/web": "^9.7.3", "@rollup/rollup-linux-arm64-gnu": "4.13.0", - "@types/lodash": "^4.17.0", - "@types/uuid": "^9.0.8", + "@testing-library/cypress": "^10.0.1", + "@testing-library/jest-dom": "^6.4.5", + "@testing-library/react": "^15.0.6", + "@testing-library/user-event": "^14.5.2", "@uidotdev/usehooks": "^2.4.1", + "@uiw/codemirror-theme-andromeda": "^4.21.25", + "@uiw/codemirror-theme-noctis-lilac": "^4.21.25", + "@uiw/react-codemirror": "^4.21.25", "axios": "^1.6.7", + "babel-jest": "^29.7.0", "classnames": "^2.5.1", "esbuild-wasm": "0.20.2", "framer-motion": "^11.0.6", + "identity-obj-proxy": "^3.0.0", + "jest": "^29.7.0", + "jest-environment-jsdom": "^29.7.0", + "jest-fetch-mock": "^3.0.3", + "jsdom": "^24.0.0", "lodash": "^4.17.21", "lucide-react": "^0.343.0", "random-words": "^2.0.1", @@ -29,14 +58,21 @@ "react-hot-toast": "^2.4.1", "react-intersection-observer": "^9.8.1", "react-router-dom": "^6.22.2", + "react-test-renderer": "^18.3.1", "react-xarrows": "^2.0.2", "reactflow": "^11.10.4", + "ts-jest": "^29.1.2", + "ts-node": "^10.9.2", "uuid": "^9.0.1", "yaml": "^2.4.1" }, "devDependencies": { - "@types/react": "^18.2.56", - "@types/react-dom": "^18.2.19", + "@types/cypress": "^1.1.3", + "@types/jest": "^29.5.12", + "@types/lodash": "^4.17.0", + "@types/react": "^18.3.1", + "@types/react-dom": "^18.3.0", + "@types/uuid": "^9.0.8", "@typescript-eslint/eslint-plugin": "^7.0.2", "@typescript-eslint/parser": "^7.0.2", "@vitejs/plugin-react": "^4.2.1", @@ -47,6 +83,8 @@ "postcss": "^8.4.35", "tailwindcss": "^3.4.1", "typescript": "^5.2.2", - "vite": "^5.1.4" - } -} + "vite": "^5.2.11", + "@types/bun": "latest" + }, + "module": "index.ts" +} \ No newline at end of file diff --git a/frontend/public/Inter-VariableFont_slnt,wght.ttf b/frontend/public/Inter-VariableFont_slnt,wght.ttf new file mode 100644 index 00000000..ec3164ef Binary files /dev/null and b/frontend/public/Inter-VariableFont_slnt,wght.ttf differ diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index 5f65641d..b8684ca6 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -1,45 +1,47 @@ -import React, { useContext, useEffect, useMemo } from "react" -import { Outlet, useLocation, useNavigate } from "react-router-dom" -import { themeContext } from "./contexts/themeContext" -import { 
ToastOptions, Toaster } from "react-hot-toast" +import { NextUIProvider } from "@nextui-org/react" +import { RouterProvider, createBrowserRouter } from "react-router-dom" +import { ReactFlowProvider } from "reactflow" +import { Preloader } from "./UI/Preloader/Preloader" import ContextWrapper from "./contexts" +import { UndoRedoProvider } from "./contexts/undoRedoContext" +import Fallback from "./pages/Fallback" +import Flow from "./pages/Flow" +import Home from "./pages/Home" +import Index from "./pages/Index" const App = () => { - const { theme } = useContext(themeContext) - const navigate = useNavigate() - const { pathname } = useLocation() - - useEffect(() => { - if (pathname === "/app" || pathname === "/" || pathname === "" || pathname === "/app/") { - navigate("/app/home") - } - }, [navigate, pathname]) - - const toastOptions: ToastOptions = useMemo( - () => - theme === "light" - ? { - style: { - backgroundColor: "#fff", - color: "#333", - }, - position: "bottom-right", - } - : { - style: { - backgroundColor: "#333", - color: "#fff", - }, - position: "bottom-right", - }, - [theme] - ) + + const router = createBrowserRouter([ + { + path: "/", + element: ( + + + + ), + loader: Preloader, + errorElement: , + children: [ + { + path: "app/flow/:flowId", + element: ( + + + + + + ), + loader: Preloader, + }, + { path: "app/home", element: , loader: Preloader }, + ], + }, + ]) return ( -
- <Toaster toastOptions={toastOptions} /> - <Outlet /> - </ContextWrapper> + <NextUIProvider> + <RouterProvider router={router} /> + </NextUIProvider> ) } diff --git a/frontend/src/api/bot.ts b/frontend/src/api/bot.ts index 54c65a15..f8690f09 100644 --- a/frontend/src/api/bot.ts +++ b/frontend/src/api/bot.ts @@ -1,6 +1,4 @@ -import { v4 } from "uuid" import { $v1 } from "." -import { AxiosResponse } from "axios" export type buildApiStatusType = | "completed" @@ -27,13 +25,14 @@ export type buildMinifyApiType = { status: buildApiStatusType preset_end_status: string timestamp: number - run_ids: number[] + runs: runMinifyApiType[] } export type runMinifyApiType = { id: number status: buildApiStatusType preset_end_status: string + log_path: string timestamp: number build_id: number } @@ -76,21 +75,21 @@ export interface localBuildType extends buildMinifyApiType { type: "build" } -type buildsResponseType = buildApiType[] -// { +// type buildsResponseType = buildApiType[] +// // { +// // status?: string +// // build: buildApiType[] +// // } + +// type runsResponseType = { // status?: string -// build: buildApiType[] +// run: runApiType[] // } -type runsResponseType = { - status?: string - run: runApiType[] -} - -type runResponseType = { - status?: string - run_info: runApiType -} +// type runResponseType = { +// status?: string +// run_info: runApiType +// } type buildStatusResponseType = { status: buildApiStatusType @@ -131,7 +130,7 @@ export const build_status = async (build_id: number) => { export const get_builds = async () => { try { const { data }: { data: buildMinifyApiType[] } = await $v1.get("/bot/builds") - console.log(data) + // console.log(data) return data } catch (error) { console.log(error) @@ -152,7 +151,7 @@ export const get_build = async (build_id: number) => { export const get_runs = async () => { try { const { data }: { data: runMinifyApiType[] } = await $v1.get("/bot/runs") - console.log(data) + // console.log(data) return data } catch (error) { console.log(error) diff --git a/frontend/src/api/index.ts b/frontend/src/api/index.ts index 63f3870e..41a4cf83 100644 --- a/frontend/src/api/index.ts +++ b/frontend/src/api/index.ts @@ -1,6 +1,8 @@ import axios from "axios"; +import { VITE_BASE_API_URL } from "../env.consts"; +const baseURL = VITE_BASE_API_URL ?? "http://localhost:8000/api/v1" export const $v1 = axios.create({ - baseURL: import.meta.env.VITE_BASE_API_URL ?? 
"http://localhost:8000", + baseURL: baseURL }) \ No newline at end of file diff --git a/frontend/src/components/chat/Chat.tsx b/frontend/src/components/chat/Chat.tsx index c3686e6d..73d67872 100644 --- a/frontend/src/components/chat/Chat.tsx +++ b/frontend/src/components/chat/Chat.tsx @@ -1,19 +1,17 @@ -import React, { useContext, useEffect, useRef, useState } from "react" +import { Button, Textarea } from "@nextui-org/react" +import { a, useTransition } from "@react-spring/web" +import axios from "axios" +import { Paperclip, Send, Smile, X } from "lucide-react" +import { useContext, useEffect, useRef, useState } from "react" +import toast from "react-hot-toast" +import { useSearchParams } from "react-router-dom" import { buildContext } from "../../contexts/buildContext" +import { chatContext } from "../../contexts/chatContext" +import { runContext } from "../../contexts/runContext" import ChatIcon from "../../icons/buildmenu/ChatIcon" -import classNames from "classnames" -import { ChevronLeft, Paperclip, Send, Smile, X } from "lucide-react" import MonitorIcon from "../../icons/buildmenu/MonitorIcon" -import { Button, Textarea } from "@nextui-org/react" -import { useSearchParams } from "react-router-dom" import { parseSearchParams } from "../../utils" -import { chatContext, messageType } from "../../contexts/chatContext" -import { useTransition, a } from "@react-spring/web" import EmojiPicker, { EmojiType } from "./EmojiPicker" -import axios from "axios" -import { runContext } from "../../contexts/runContext" -import toast from "react-hot-toast" -import { parse, stringify } from "yaml" const Chat = () => { const { logsPage, setLogsPage } = useContext(buildContext) @@ -25,19 +23,20 @@ const Chat = () => { const [isEmoji, setIsEmoji] = useState(false) const [emojis, setEmojis] = useState([]) + // eslint-disable-next-line @typescript-eslint/no-unused-vars const [emojisPending, setEmojisPending] = useState(false) useEffect(() => { const getEmojis = async () => { - setEmojisPending((prev) => true) + setEmojisPending(() => true) const emojis_data = await axios .get("https://emoji-api.com/emojis?access_key=4dd2f9e45b38e17c21b432caf8ac12206775bfef") - .finally(() => setEmojisPending((prev) => false)) + .finally(() => setEmojisPending(() => false)) return emojis_data } getEmojis() .then(({ data }) => { - setEmojis((prev) => data) + setEmojis(() => data) }) .catch(() => { console.log("emojis load error") @@ -48,7 +47,8 @@ const Chat = () => { const handleMessage = () => { if (messageValue) { - if (ws.current && ws.current.OPEN) { + if (ws.current && ws.current.readyState === 1) { + console.log(ws.current) // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore ws.current.send(messageValue) @@ -86,6 +86,7 @@ const Chat = () => { document.addEventListener("keydown", enterDownEvent) return () => document.removeEventListener("keydown", enterDownEvent) + // eslint-disable-next-line react-hooks/exhaustive-deps }, [messageValue, messages, setMessages]) const chatWindowRef = useRef(null) @@ -117,7 +118,7 @@ const Chat = () => { useEffect(() => { if (runStatus === "alive" && run) { - const socket = new WebSocket(`ws://localhost:8000/bot/run/connect?pid=${run.id}`) + const socket = new WebSocket(`ws://localhost:8000/api/v1/bot/run/connect?run_id=${run.id}`) socket.onopen = (e) => { console.log(e) toast.success("Chat was successfully connected!") @@ -127,7 +128,7 @@ const Chat = () => { if (event.data) { // console.log(event.data) const data = 
event.data.split(":")[2].split("attachments")[0].slice(0, -2) - console.log(data) + // console.log(data) setTimeout(() => { setMessages((prev) => [...prev, { message: data, type: "bot" }]) }, 500); @@ -142,11 +143,12 @@ const Chat = () => { return () => { ws.current?.close() } + // eslint-disable-next-line react-hooks/exhaustive-deps }, [run, runStatus]) return (
@@ -189,6 +191,7 @@ const Chat = () => { className='h-[60vh] bg-chat border-b border-border px-2 py-2 overflow-y-scroll scrollbar-hide flex flex-col gap-2'> {messagesT((style, m) => ( { className={`origin-bottom-right`}> { + onEmojiClick={(emoji) => { setMessageValue((prev) => prev + emoji) }} lazy @@ -245,6 +248,7 @@ const Chat = () => {